storage.py 65 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710
  1. # This file is part of Radicale Server - Calendar Server
  2. # Copyright © 2014 Jean-Marc Martins
  3. # Copyright © 2012-2017 Guillaume Ayoub
  4. #
  5. # This library is free software: you can redistribute it and/or modify
  6. # it under the terms of the GNU General Public License as published by
  7. # the Free Software Foundation, either version 3 of the License, or
  8. # (at your option) any later version.
  9. #
  10. # This library is distributed in the hope that it will be useful,
  11. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  12. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  13. # GNU General Public License for more details.
  14. #
  15. # You should have received a copy of the GNU General Public License
  16. # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  17. """
  18. Storage backends.
  19. This module loads the storage backend, according to the storage configuration.
  20. Default storage uses one folder per collection and one file per collection
  21. entry.
  22. """
  23. import binascii
  24. import contextlib
  25. import json
  26. import logging
  27. import os
  28. import pickle
  29. import posixpath
  30. import shlex
  31. import subprocess
  32. import sys
  33. import threading
  34. import time
  35. from contextlib import contextmanager
  36. from hashlib import md5
  37. from importlib import import_module
  38. from itertools import chain, groupby
  39. from math import log
  40. from random import getrandbits
  41. from tempfile import NamedTemporaryFile, TemporaryDirectory
  42. import vobject
if sys.version_info >= (3, 5):
    # HACK: Avoid import cycle for Python < 3.5
    from radicale import xmlutils

if os.name == "nt":
    # Windows has no fcntl; whole-file locking is done through the Win32
    # LockFileEx/UnlockFileEx API, accessed via ctypes.
    import ctypes
    import ctypes.wintypes
    import msvcrt

    # dwFlags bit for LockFileEx: request an exclusive (write) lock.
    LOCKFILE_EXCLUSIVE_LOCK = 2
    # ULONG_PTR is pointer-sized: pick the width matching this process.
    if ctypes.sizeof(ctypes.c_void_p) == 4:
        ULONG_PTR = ctypes.c_uint32
    else:
        ULONG_PTR = ctypes.c_uint64

    class Overlapped(ctypes.Structure):
        # ctypes mirror of the Win32 OVERLAPPED structure required by
        # LockFileEx/UnlockFileEx.
        _fields_ = [
            ("internal", ULONG_PTR),
            ("internal_high", ULONG_PTR),
            ("offset", ctypes.wintypes.DWORD),
            ("offset_high", ctypes.wintypes.DWORD),
            ("h_event", ctypes.wintypes.HANDLE)]

    # Declare argument/return types so ctypes marshals values correctly.
    lock_file_ex = ctypes.windll.kernel32.LockFileEx
    lock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    lock_file_ex.restype = ctypes.wintypes.BOOL
    unlock_file_ex = ctypes.windll.kernel32.UnlockFileEx
    unlock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    unlock_file_ex.restype = ctypes.wintypes.BOOL
elif os.name == "posix":
    # POSIX file locking
    import fcntl

# Storage backends that ship with Radicale itself (anything else is loaded
# as an external module by ``load``).
INTERNAL_TYPES = ("multifilesystem",)
  82. def load(configuration, logger):
  83. """Load the storage manager chosen in configuration."""
  84. if sys.version_info < (3, 5):
  85. # HACK: Avoid import cycle for Python < 3.5
  86. global xmlutils
  87. from radicale import xmlutils
  88. storage_type = configuration.get("storage", "type")
  89. if storage_type == "multifilesystem":
  90. collection_class = Collection
  91. else:
  92. try:
  93. collection_class = import_module(storage_type).Collection
  94. except Exception as e:
  95. raise RuntimeError("Failed to load storage module %r: %s" %
  96. (storage_type, e)) from e
  97. logger.info("Storage type is %r", storage_type)
  98. class CollectionCopy(collection_class):
  99. """Collection copy, avoids overriding the original class attributes."""
  100. CollectionCopy.configuration = configuration
  101. CollectionCopy.logger = logger
  102. CollectionCopy.static_init()
  103. return CollectionCopy
def check_and_sanitize_item(vobject_item, is_collection=False, uid=None,
                            tag=None):
    """Check vobject items for common errors and add missing UIDs.

    ``is_collection`` indicates that vobject_item contains unrelated
    components.

    If ``uid`` is not set, the UID is generated randomly.

    The ``tag`` of the collection.

    Raises ``ValueError`` on unsupported tags, mixed component types,
    inconsistent UIDs or invalid recurrence rules.
    """
    if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
        raise ValueError("Unsupported collection tag: %r" % tag)
    if vobject_item.name == "VCALENDAR" and tag == "VCALENDAR":
        component_name = None
        object_uid = None
        object_uid_set = False
        for component in vobject_item.components():
            # https://tools.ietf.org/html/rfc4791#section-4.1
            # VTIMEZONE components are auxiliary and ignored here.
            if component.name == "VTIMEZONE":
                continue
            if component_name is None or is_collection:
                component_name = component.name
            elif component_name != component.name:
                raise ValueError("Multiple component types in object: %r, %r" %
                                 (component_name, component.name))
            if component_name not in ("VTODO", "VEVENT", "VJOURNAL"):
                continue
            component_uid = get_uid(component)
            if not object_uid_set or is_collection:
                object_uid_set = True
                object_uid = component_uid
                # Add or fill in a missing/empty UID on the first component
                # (or every component when ``is_collection``).
                if component_uid is None:
                    component.add("UID").value = uid or random_uuid4()
                elif not component_uid:
                    component.uid.value = uid or random_uuid4()
            elif not object_uid or not component_uid:
                raise ValueError("Multiple %s components without UID in "
                                 "object" % component_name)
            elif object_uid != component_uid:
                raise ValueError(
                    "Multiple %s components with different UIDs in object: "
                    "%r, %r" % (component_name, object_uid, component_uid))
            # vobject interprets recurrence rules on demand
            try:
                component.rruleset
            except Exception as e:
                raise ValueError("invalid recurrence rules in %s" %
                                 component.name) from e
    elif vobject_item.name == "VCARD" and tag == "VADDRESSBOOK":
        # https://tools.ietf.org/html/rfc6352#section-5.1
        object_uid = get_uid(vobject_item)
        if object_uid is None:
            vobject_item.add("UID").value = uid or random_uuid4()
        elif not object_uid:
            vobject_item.uid.value = uid or random_uuid4()
    elif vobject_item.name == "VLIST" and tag == "VADDRESSBOOK":
        # Custom format used by SOGo Connector to store lists of contacts
        pass
    else:
        raise ValueError("Item type %r not supported in %s collection" %
                         (vobject_item.name, repr(tag) if tag else "generic"))
  163. def check_and_sanitize_props(props):
  164. """Check collection properties for common errors."""
  165. tag = props.get("tag")
  166. if tag and tag not in ("VCALENDAR", "VADDRESSBOOK"):
  167. raise ValueError("Unsupported collection tag: %r" % tag)
  168. def random_uuid4():
  169. """Generate a pseudo-random UUID"""
  170. r = "%016x" % getrandbits(128)
  171. return "%s-%s-%s-%s-%s" % (r[:8], r[8:12], r[12:16], r[16:20], r[20:])
  172. def scandir(path, only_dirs=False, only_files=False):
  173. """Iterator for directory elements. (For compatibility with Python < 3.5)
  174. ``only_dirs`` only return directories
  175. ``only_files`` only return files
  176. """
  177. if sys.version_info >= (3, 5):
  178. for entry in os.scandir(path):
  179. if ((not only_files or entry.is_file()) and
  180. (not only_dirs or entry.is_dir())):
  181. yield entry.name
  182. else:
  183. for name in os.listdir(path):
  184. p = os.path.join(path, name)
  185. if ((not only_files or os.path.isfile(p)) and
  186. (not only_dirs or os.path.isdir(p))):
  187. yield name
  188. def get_etag(text):
  189. """Etag from collection or item.
  190. Encoded as quoted-string (see RFC 2616).
  191. """
  192. etag = md5()
  193. etag.update(text.encode("utf-8"))
  194. return '"%s"' % etag.hexdigest()
  195. def get_uid(vobject_component):
  196. """UID value of an item if defined."""
  197. return (vobject_component.uid.value
  198. if hasattr(vobject_component, "uid") else None)
  199. def get_uid_from_object(vobject_item):
  200. """UID value of an calendar/addressbook object."""
  201. if vobject_item.name == "VCALENDAR":
  202. if hasattr(vobject_item, "vevent"):
  203. return get_uid(vobject_item.vevent)
  204. if hasattr(vobject_item, "vjournal"):
  205. return get_uid(vobject_item.vjournal)
  206. if hasattr(vobject_item, "vtodo"):
  207. return get_uid(vobject_item.vtodo)
  208. elif vobject_item.name == "VCARD":
  209. return get_uid(vobject_item)
  210. return None
  211. def sanitize_path(path):
  212. """Make path absolute with leading slash to prevent access to other data.
  213. Preserve a potential trailing slash.
  214. """
  215. trailing_slash = "/" if path.endswith("/") else ""
  216. path = posixpath.normpath(path)
  217. new_path = "/"
  218. for part in path.split("/"):
  219. if not is_safe_path_component(part):
  220. continue
  221. new_path = posixpath.join(new_path, part)
  222. trailing_slash = "" if new_path.endswith("/") else trailing_slash
  223. return new_path + trailing_slash
  224. def is_safe_path_component(path):
  225. """Check if path is a single component of a path.
  226. Check that the path is safe to join too.
  227. """
  228. return path and "/" not in path and path not in (".", "..")
  229. def is_safe_filesystem_path_component(path):
  230. """Check if path is a single component of a local and posix filesystem
  231. path.
  232. Check that the path is safe to join too.
  233. """
  234. return (
  235. path and not os.path.splitdrive(path)[0] and
  236. not os.path.split(path)[0] and path not in (os.curdir, os.pardir) and
  237. not path.startswith(".") and not path.endswith("~") and
  238. is_safe_path_component(path))
  239. def path_to_filesystem(root, *paths):
  240. """Convert path to a local filesystem path relative to base_folder.
  241. `root` must be a secure filesystem path, it will be prepend to the path.
  242. Conversion of `paths` is done in a secure manner, or raises ``ValueError``.
  243. """
  244. paths = [sanitize_path(path).strip("/") for path in paths]
  245. safe_path = root
  246. for path in paths:
  247. if not path:
  248. continue
  249. for part in path.split("/"):
  250. if not is_safe_filesystem_path_component(part):
  251. raise UnsafePathError(part)
  252. safe_path_parent = safe_path
  253. safe_path = os.path.join(safe_path, part)
  254. # Check for conflicting files (e.g. case-insensitive file systems
  255. # or short names on Windows file systems)
  256. if (os.path.lexists(safe_path) and
  257. part not in scandir(safe_path_parent)):
  258. raise CollidingPathError(part)
  259. return safe_path
  260. def left_encode_int(v):
  261. length = int(log(v, 256)) + 1 if v != 0 else 1
  262. return bytes((length,)) + v.to_bytes(length, 'little')
  263. class UnsafePathError(ValueError):
  264. def __init__(self, path):
  265. message = "Can't translate name safely to filesystem: %r" % path
  266. super().__init__(message)
  267. class CollidingPathError(ValueError):
  268. def __init__(self, path):
  269. message = "File name collision: %r" % path
  270. super().__init__(message)
  271. class ComponentExistsError(ValueError):
  272. def __init__(self, path):
  273. message = "Component already exists: %r" % path
  274. super().__init__(message)
  275. class ComponentNotFoundError(ValueError):
  276. def __init__(self, path):
  277. message = "Component doesn't exist: %r" % path
  278. super().__init__(message)
  279. class Item:
  280. def __init__(self, collection, item=None, href=None, last_modified=None,
  281. text=None, etag=None, uid=None, name=None,
  282. component_name=None):
  283. """Initialize an item.
  284. ``collection`` the parent collection.
  285. ``href`` the href of the item.
  286. ``last_modified`` the HTTP-datetime of when the item was modified.
  287. ``text`` the text representation of the item (optional if ``item`` is
  288. set).
  289. ``item`` the vobject item (optional if ``text`` is set).
  290. ``etag`` the etag of the item (optional). See ``get_etag``.
  291. ``uid`` the UID of the object (optional). See ``get_uid_from_object``.
  292. """
  293. if text is None and item is None:
  294. raise ValueError("at least one of 'text' or 'item' must be set")
  295. self.collection = collection
  296. self.href = href
  297. self.last_modified = last_modified
  298. self._text = text
  299. self._item = item
  300. self._etag = etag
  301. self._uid = uid
  302. self._name = name
  303. self._component_name = component_name
  304. def __getattr__(self, attr):
  305. return getattr(self.item, attr)
  306. def serialize(self):
  307. if self._text is None:
  308. try:
  309. self._text = self.item.serialize()
  310. except Exception as e:
  311. raise RuntimeError("Failed to serialize item %r from %r: %s" %
  312. (self.href, self.collection.path, e)) from e
  313. return self._text
  314. @property
  315. def item(self):
  316. if self._item is None:
  317. try:
  318. self._item = vobject.readOne(self._text)
  319. except Exception as e:
  320. raise RuntimeError("Failed to parse item %r from %r: %s" %
  321. (self.href, self.collection.path, e)) from e
  322. return self._item
  323. @property
  324. def etag(self):
  325. """Encoded as quoted-string (see RFC 2616)."""
  326. if self._etag is None:
  327. self._etag = get_etag(self.serialize())
  328. return self._etag
  329. @property
  330. def uid(self):
  331. if self._uid is None:
  332. self._uid = get_uid_from_object(self.item)
  333. return self._uid
  334. @property
  335. def name(self):
  336. if self._name is not None:
  337. return self._name
  338. return self.item.name
  339. @property
  340. def component_name(self):
  341. if self._component_name is not None:
  342. return self._component_name
  343. return xmlutils.find_tag(self.item)
  344. class BaseCollection:
  345. # Overriden on copy by the "load" function
  346. configuration = None
  347. logger = None
  348. # Properties of instance
  349. """The sanitized path of the collection without leading or trailing ``/``.
  350. """
  351. path = ""
  352. @classmethod
  353. def static_init():
  354. """init collection copy"""
  355. pass
  356. @property
  357. def owner(self):
  358. """The owner of the collection."""
  359. return self.path.split("/", maxsplit=1)[0]
  360. @property
  361. def is_principal(self):
  362. """Collection is a principal."""
  363. return bool(self.path) and "/" not in self.path
  364. @owner.setter
  365. def owner(self, value):
  366. # DEPRECATED: Included for compatibility reasons
  367. pass
  368. @is_principal.setter
  369. def is_principal(self, value):
  370. # DEPRECATED: Included for compatibility reasons
  371. pass
  372. @classmethod
  373. def discover(cls, path, depth="0"):
  374. """Discover a list of collections under the given ``path``.
  375. ``path`` is sanitized.
  376. If ``depth`` is "0", only the actual object under ``path`` is
  377. returned.
  378. If ``depth`` is anything but "0", it is considered as "1" and direct
  379. children are included in the result.
  380. The root collection "/" must always exist.
  381. """
  382. raise NotImplementedError
  383. @classmethod
  384. def move(cls, item, to_collection, to_href):
  385. """Move an object.
  386. ``item`` is the item to move.
  387. ``to_collection`` is the target collection.
  388. ``to_href`` is the target name in ``to_collection``. An item with the
  389. same name might already exist.
  390. """
  391. if item.collection.path == to_collection.path and item.href == to_href:
  392. return
  393. to_collection.upload(to_href, item.item)
  394. item.collection.delete(item.href)
  395. @property
  396. def etag(self):
  397. """Encoded as quoted-string (see RFC 2616)."""
  398. etag = md5()
  399. for item in self.get_all():
  400. etag.update((item.href + "/" + item.etag).encode("utf-8"))
  401. etag.update(json.dumps(self.get_meta(), sort_keys=True).encode())
  402. return '"%s"' % etag.hexdigest()
  403. @classmethod
  404. def create_collection(cls, href, collection=None, props=None):
  405. """Create a collection.
  406. ``href`` is the sanitized path.
  407. If the collection already exists and neither ``collection`` nor
  408. ``props`` are set, this method shouldn't do anything. Otherwise the
  409. existing collection must be replaced.
  410. ``collection`` is a list of vobject components.
  411. ``props`` are metadata values for the collection.
  412. ``props["tag"]`` is the type of collection (VCALENDAR or
  413. VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
  414. collection.
  415. """
  416. raise NotImplementedError
  417. def sync(self, old_token=None):
  418. """Get the current sync token and changed items for synchronization.
  419. ``old_token`` an old sync token which is used as the base of the
  420. delta update. If sync token is missing, all items are returned.
  421. ValueError is raised for invalid or old tokens.
  422. WARNING: This simple default implementation treats all sync-token as
  423. invalid. It adheres to the specification but some clients
  424. (e.g. InfCloud) don't like it. Subclasses should provide a
  425. more sophisticated implementation.
  426. """
  427. token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
  428. if old_token:
  429. raise ValueError("Sync token are not supported")
  430. return token, self.list()
  431. def list(self):
  432. """List collection items."""
  433. raise NotImplementedError
  434. def get(self, href):
  435. """Fetch a single item."""
  436. raise NotImplementedError
  437. def get_multi(self, hrefs):
  438. """Fetch multiple items. Duplicate hrefs must be ignored.
  439. DEPRECATED: use ``get_multi2`` instead
  440. """
  441. return (self.get(href) for href in set(hrefs))
  442. def get_multi2(self, hrefs):
  443. """Fetch multiple items.
  444. Functionally similar to ``get``, but might bring performance benefits
  445. on some storages when used cleverly. It's not required to return the
  446. requested items in the correct order. Duplicated hrefs can be ignored.
  447. Returns tuples with the href and the item or None if the item doesn't
  448. exist.
  449. """
  450. return ((href, self.get(href)) for href in hrefs)
  451. def get_all(self):
  452. """Fetch all items.
  453. Functionally similar to ``get``, but might bring performance benefits
  454. on some storages when used cleverly.
  455. """
  456. return map(self.get, self.list())
  457. def get_all_filtered(self, filters):
  458. """Fetch all items with optional filtering.
  459. This can largely improve performance of reports depending on
  460. the filters and this implementation.
  461. Returns tuples in the form ``(item, filters_matched)``.
  462. ``filters_matched`` is a bool that indicates if ``filters`` are fully
  463. matched.
  464. This returns all events by default
  465. """
  466. return ((item, False) for item in self.get_all())
  467. def pre_filtered_list(self, filters):
  468. """List collection items with optional pre filtering.
  469. DEPRECATED: use ``get_all_filtered`` instead
  470. """
  471. return self.get_all()
  472. def has(self, href):
  473. """Check if an item exists by its href.
  474. Functionally similar to ``get``, but might bring performance benefits
  475. on some storages when used cleverly.
  476. """
  477. return self.get(href) is not None
  478. def upload(self, href, vobject_item):
  479. """Upload a new or replace an existing item."""
  480. raise NotImplementedError
  481. def delete(self, href=None):
  482. """Delete an item.
  483. When ``href`` is ``None``, delete the collection.
  484. """
  485. raise NotImplementedError
  486. def get_meta(self, key=None):
  487. """Get metadata value for collection.
  488. Return the value of the property ``key``. If ``key`` is ``None`` return
  489. a dict with all properties
  490. """
  491. raise NotImplementedError
  492. def set_meta(self, props):
  493. """Set metadata values for collection.
  494. ``props`` a dict with updates for properties. If a value is empty, the
  495. property must be deleted.
  496. DEPRECATED: use ``set_meta_all`` instead
  497. """
  498. raise NotImplementedError
  499. def set_meta_all(self, props):
  500. """Set metadata values for collection.
  501. ``props`` a dict with values for properties.
  502. """
  503. delta_props = self.get_meta()
  504. for key in delta_props.keys():
  505. if key not in props:
  506. delta_props[key] = None
  507. delta_props.update(props)
  508. self.set_meta(self, delta_props)
  509. @property
  510. def last_modified(self):
  511. """Get the HTTP-datetime of when the collection was modified."""
  512. raise NotImplementedError
  513. def serialize(self):
  514. """Get the unicode string representing the whole collection."""
  515. if self.get_meta("tag") == "VCALENDAR":
  516. in_vcalendar = False
  517. vtimezones = ""
  518. included_tzids = set()
  519. vtimezone = []
  520. tzid = None
  521. components = ""
  522. # Concatenate all child elements of VCALENDAR from all items
  523. # together, while preventing duplicated VTIMEZONE entries.
  524. # VTIMEZONEs are only distinguished by their TZID, if different
  525. # timezones share the same TZID this produces errornous ouput.
  526. # VObject fails at this too.
  527. for item in self.get_all():
  528. depth = 0
  529. for line in item.serialize().split("\r\n"):
  530. if line.startswith("BEGIN:"):
  531. depth += 1
  532. if depth == 1 and line == "BEGIN:VCALENDAR":
  533. in_vcalendar = True
  534. elif in_vcalendar:
  535. if depth == 1 and line.startswith("END:"):
  536. in_vcalendar = False
  537. if depth == 2 and line == "BEGIN:VTIMEZONE":
  538. vtimezone.append(line + "\r\n")
  539. elif vtimezone:
  540. vtimezone.append(line + "\r\n")
  541. if depth == 2 and line.startswith("TZID:"):
  542. tzid = line[len("TZID:"):]
  543. elif depth == 2 and line.startswith("END:"):
  544. if tzid is None or tzid not in included_tzids:
  545. vtimezones += "".join(vtimezone)
  546. included_tzids.add(tzid)
  547. vtimezone.clear()
  548. tzid = None
  549. elif depth >= 2:
  550. components += line + "\r\n"
  551. if line.startswith("END:"):
  552. depth -= 1
  553. template = vobject.iCalendar()
  554. displayname = self.get_meta("D:displayname")
  555. if displayname:
  556. template.add("X-WR-CALNAME")
  557. template.x_wr_calname.value_param = "TEXT"
  558. template.x_wr_calname.value = displayname
  559. description = self.get_meta("C:calendar-description")
  560. if description:
  561. template.add("X-WR-CALDESC")
  562. template.x_wr_caldesc.value_param = "TEXT"
  563. template.x_wr_caldesc.value = description
  564. template = template.serialize()
  565. template_insert_pos = template.find("\r\nEND:VCALENDAR\r\n") + 2
  566. assert template_insert_pos != -1
  567. return (template[:template_insert_pos] +
  568. vtimezones + components +
  569. template[template_insert_pos:])
  570. elif self.get_meta("tag") == "VADDRESSBOOK":
  571. return "".join((item.serialize() for item in self.get_all()))
  572. return ""
  573. @classmethod
  574. @contextmanager
  575. def acquire_lock(cls, mode, user=None):
  576. """Set a context manager to lock the whole storage.
  577. ``mode`` must either be "r" for shared access or "w" for exclusive
  578. access.
  579. ``user`` is the name of the logged in user or empty.
  580. """
  581. raise NotImplementedError
  582. @classmethod
  583. def verify(cls):
  584. """Check the storage for errors."""
  585. return True
# Version tag of the serialized item cache — presumably bumped when the
# cache format changes so stale entries are discarded; the usage is outside
# this chunk, verify against Collection's cache code.
ITEM_CACHE_VERSION = 1
  587. class Collection(BaseCollection):
  588. """Collection stored in several files per calendar."""
    @classmethod
    def static_init(cls):
        """One-time class setup: storage folder, storage lock, cache locks.

        Called by ``load`` after ``configuration`` and ``logger`` have been
        attached to the class.
        """
        # init storage lock
        folder = os.path.expanduser(cls.configuration.get(
            "storage", "filesystem_folder"))
        cls._makedirs_synced(folder)
        lock_path = None
        if cls.configuration.getboolean("storage", "filesystem_locking"):
            lock_path = os.path.join(folder, ".Radicale.lock")
        close_lock_file = cls.configuration.getboolean(
            "storage", "filesystem_close_lock_file")
        # FileBackedRwLock is defined elsewhere in this file (not in view).
        cls._lock = FileBackedRwLock(lock_path, close_lock_file)
        # init cache lock
        cls._cache_locks = {}
        cls._cache_locks_lock = threading.Lock()
    def __init__(self, path, principal=None, folder=None,
                 filesystem_path=None):
        """Initialize the collection for the (already sanitized) ``path``.

        ``folder`` overrides the collection root folder and
        ``filesystem_path`` overrides the derived on-disk location; both
        are mainly for internal use.
        """
        # DEPRECATED: Remove principal and folder attributes
        if folder is None:
            folder = self._get_collection_root_folder()
        # Path should already be sanitized
        self.path = sanitize_path(path).strip("/")
        self._encoding = self.configuration.get("encoding", "stock")
        # DEPRECATED: Use ``self._encoding`` instead
        self.encoding = self._encoding
        if filesystem_path is None:
            filesystem_path = path_to_filesystem(folder, self.path)
        self._filesystem_path = filesystem_path
        # Collection metadata is stored in a ".Radicale.props" file inside
        # the collection folder.
        self._props_path = os.path.join(
            self._filesystem_path, ".Radicale.props")
        # Lazily-populated caches; see the cache handling elsewhere in the
        # class (outside this chunk).
        self._meta_cache = None
        self._etag_cache = None
        self._item_cache_cleaned = False
  622. @classmethod
  623. def _get_collection_root_folder(cls):
  624. filesystem_folder = os.path.expanduser(
  625. cls.configuration.get("storage", "filesystem_folder"))
  626. return os.path.join(filesystem_folder, "collection-root")
    @contextmanager
    def _atomic_write(self, path, mode="w", newline=None, sync_directory=True):
        """Context manager that writes ``path`` atomically.

        The caller writes to a temporary file in the same directory; on
        success the file is flushed, fsync'ed and moved over ``path`` with
        ``os.replace``.  On any error the temporary file is removed and the
        exception re-raised.
        """
        directory = os.path.dirname(path)
        tmp = NamedTemporaryFile(
            mode=mode, dir=directory, delete=False, prefix=".Radicale.tmp-",
            newline=newline, encoding=None if "b" in mode else self._encoding)
        try:
            yield tmp
            tmp.flush()
            try:
                self._fsync(tmp.fileno())
            except OSError as e:
                raise RuntimeError("Fsync'ing file %r failed: %s" %
                                   (path, e)) from e
            tmp.close()
            # Atomic rename: readers see either the old or the new file.
            os.replace(tmp.name, path)
        except BaseException:
            # Clean up the temporary file on any failure (including
            # exceptions raised by the caller inside the ``with`` body).
            tmp.close()
            os.remove(tmp.name)
            raise
        if sync_directory:
            self._sync_directory(directory)
  649. @staticmethod
  650. def _find_available_file_name(exists_fn, suffix=""):
  651. # Prevent infinite loop
  652. for _ in range(1000):
  653. file_name = random_uuid4() + suffix
  654. if not exists_fn(file_name):
  655. return file_name
  656. # something is wrong with the PRNG
  657. raise RuntimeError("No unique random sequence found")
    @classmethod
    def _fsync(cls, fd):
        # Flush file descriptor ``fd`` to disk, if enabled in the config.
        if cls.configuration.getboolean("storage", "filesystem_fsync"):
            if os.name == "posix" and hasattr(fcntl, "F_FULLFSYNC"):
                # F_FULLFSYNC is needed for a real flush to disk (macOS)
                fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
            else:
                os.fsync(fd)
    @classmethod
    def _sync_directory(cls, path):
        """Sync directory to disk.

        This only works on POSIX and does nothing on other systems.
        """
        if not cls.configuration.getboolean("storage", "filesystem_fsync"):
            return
        if os.name == "posix":
            try:
                # Directories are fsynced through a read-only descriptor
                fd = os.open(path, 0)
                try:
                    cls._fsync(fd)
                finally:
                    os.close(fd)
            except OSError as e:
                raise RuntimeError("Fsync'ing directory %r failed: %s" %
                                   (path, e)) from e
    @classmethod
    def _makedirs_synced(cls, filesystem_path):
        """Recursively create a directory and its parents in a sync'ed way.

        This method acts silently when the folder already exists.
        """
        if os.path.isdir(filesystem_path):
            return
        parent_filesystem_path = os.path.dirname(filesystem_path)
        # Prevent infinite loop
        if filesystem_path != parent_filesystem_path:
            # Create parent dirs recursively
            cls._makedirs_synced(parent_filesystem_path)
        # Possible race!
        os.makedirs(filesystem_path, exist_ok=True)
        # Make the new directory entry durable in its parent
        cls._sync_directory(parent_filesystem_path)
    @classmethod
    def discover(cls, path, depth="0", child_context_manager=(
            lambda path, href=None: contextlib.ExitStack())):
        """Yield the collection or item at ``path`` and, when ``depth`` is
        not "0", the collection's direct children.

        ``child_context_manager`` wraps the retrieval of each child (used
        by ``verify`` to collect per-item errors without aborting).
        """
        # Path should already be sanitized
        sane_path = sanitize_path(path).strip("/")
        attributes = sane_path.split("/") if sane_path else []
        folder = cls._get_collection_root_folder()
        # Create the root collection
        cls._makedirs_synced(folder)
        try:
            filesystem_path = path_to_filesystem(folder, sane_path)
        except ValueError as e:
            # Path is unsafe
            cls.logger.debug("Unsafe path %r requested from storage: %s",
                             sane_path, e, exc_info=True)
            return
        # Check if the path exists and if it leads to a collection or an item
        if not os.path.isdir(filesystem_path):
            if attributes and os.path.isfile(filesystem_path):
                # The last path segment names an item of the parent collection
                href = attributes.pop()
            else:
                return
        else:
            href = None
        sane_path = "/".join(attributes)
        collection = cls(sane_path)
        if href:
            yield collection.get(href)
            return
        yield collection
        if depth == "0":
            return
        # Depth "1": yield the collection's items ...
        for href in collection.list():
            with child_context_manager(sane_path, href):
                yield collection.get(href)
        # ... and its sub-collections
        for href in scandir(filesystem_path, only_dirs=True):
            if not is_safe_filesystem_path_component(href):
                # Internal ".Radicale*" folders are expected and not logged
                if not href.startswith(".Radicale"):
                    cls.logger.debug("Skipping collection %r in %r", href,
                                     sane_path)
                continue
            child_path = posixpath.join(sane_path, href)
            with child_context_manager(child_path):
                yield cls(child_path)
    @classmethod
    def verify(cls):
        """Walk the whole storage and check every collection and item.

        Returns ``True`` when no errors were found.
        """
        item_errors = collection_errors = 0

        @contextlib.contextmanager
        def exception_cm(path, href=None):
            # Count and log failures instead of aborting the walk
            nonlocal item_errors, collection_errors
            try:
                yield
            except Exception as e:
                if href:
                    item_errors += 1
                    name = "item %r in %r" % (href, path.strip("/"))
                else:
                    collection_errors += 1
                    name = "collection %r" % path.strip("/")
                cls.logger.error("Invalid %s: %s", name, e, exc_info=True)

        remaining_paths = [""]
        while remaining_paths:
            path = remaining_paths.pop(0)
            cls.logger.debug("Verifying collection %r", path)
            with exception_cm(path):
                saved_item_errors = item_errors
                collection = None
                for item in cls.discover(path, "1", exception_cm):
                    if not collection:
                        # The first value yielded is the collection itself
                        collection = item
                        collection.get_meta()
                        continue
                    if isinstance(item, BaseCollection):
                        # Sub-collections are verified in later iterations
                        remaining_paths.append(item.path)
                    else:
                        cls.logger.debug("Verified item %r in %r",
                                         item.href, path)
                if item_errors == saved_item_errors:
                    # Only regenerate the sync state for clean collections
                    collection.sync()
        return item_errors == 0 and collection_errors == 0
    @classmethod
    def create_collection(cls, href, collection=None, props=None):
        """Create (or replace) a collection at ``href``.

        ``collection`` optional iterable of vobject components used to fill
        the new collection.

        ``props`` properties to store; when given, the collection is built
        in a temporary directory and then swapped into place.
        """
        folder = cls._get_collection_root_folder()
        # Path should already be sanitized
        sane_path = sanitize_path(href).strip("/")
        filesystem_path = path_to_filesystem(folder, sane_path)
        if not props:
            # Plain folder without properties: create it in place
            cls._makedirs_synced(filesystem_path)
            return cls(sane_path)
        parent_dir = os.path.dirname(filesystem_path)
        cls._makedirs_synced(parent_dir)
        # Create a temporary directory with an unsafe name
        with TemporaryDirectory(
                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
            # The temporary directory itself can't be renamed
            tmp_filesystem_path = os.path.join(tmp_dir, "collection")
            os.makedirs(tmp_filesystem_path)
            self = cls(sane_path, filesystem_path=tmp_filesystem_path)
            self.set_meta_all(props)
            if collection:
                if props.get("tag") == "VCALENDAR":
                    # Split one VCALENDAR into one item per UID
                    collection, = collection
                    items = []
                    for content in ("vevent", "vtodo", "vjournal"):
                        items.extend(
                            getattr(collection, "%s_list" % content, []))
                    items_by_uid = groupby(sorted(items, key=get_uid), get_uid)
                    vobject_items = {}
                    for uid, items in items_by_uid:
                        new_collection = vobject.iCalendar()
                        for item in items:
                            new_collection.add(item)
                        # href must comply to is_safe_filesystem_path_component
                        # and no file name collisions must exist between hrefs
                        href = self._find_available_file_name(
                            vobject_items.get, suffix=".ics")
                        vobject_items[href] = new_collection
                    self._upload_all_nonatomic(vobject_items)
                elif props.get("tag") == "VADDRESSBOOK":
                    # One vCard per item
                    vobject_items = {}
                    for card in collection:
                        # href must comply to is_safe_filesystem_path_component
                        # and no file name collisions must exist between hrefs
                        href = self._find_available_file_name(
                            vobject_items.get, suffix=".vcf")
                        vobject_items[href] = card
                    self._upload_all_nonatomic(vobject_items)
            # This operation is not atomic on the filesystem level but it's
            # very unlikely that one rename operation succeeds while the
            # other fails or that only one gets written to disk.
            if os.path.exists(filesystem_path):
                os.rename(filesystem_path, os.path.join(tmp_dir, "delete"))
            os.rename(tmp_filesystem_path, filesystem_path)
            cls._sync_directory(parent_dir)
        return cls(sane_path)
    def upload_all_nonatomic(self, vobject_items):
        """Upload a new set of items (public wrapper).

        DEPRECATED: Use ``_upload_all_nonatomic``
        """
        return self._upload_all_nonatomic(vobject_items)
    def _upload_all_nonatomic(self, vobject_items):
        """Upload a new set of items.

        This takes a mapping of href and vobject items and
        uploads them nonatomic and without existence checks.
        """
        cache_folder = os.path.join(self._filesystem_path,
                                    ".Radicale.cache", "item")
        self._makedirs_synced(cache_folder)
        for href, vobject_item in vobject_items.items():
            if not is_safe_filesystem_path_component(href):
                raise UnsafePathError(href)
            try:
                # Serialize first so broken items fail before any write
                cache_content = self._item_cache_content(href, vobject_item)
                _, _, _, text, _, _, _, _ = cache_content
            except Exception as e:
                raise ValueError(
                    "Failed to store item %r in temporary collection %r: %s" %
                    (href, self.path, e)) from e
            # Write the cache entry and the item file; the directories are
            # synced once at the end instead of per file
            with self._atomic_write(os.path.join(cache_folder, href), "wb",
                                    sync_directory=False) as f:
                pickle.dump(cache_content, f)
            path = path_to_filesystem(self._filesystem_path, href)
            with self._atomic_write(
                    path, newline="", sync_directory=False) as f:
                f.write(text)
        self._sync_directory(cache_folder)
        self._sync_directory(self._filesystem_path)
  862. @classmethod
  863. def move(cls, item, to_collection, to_href):
  864. if not is_safe_filesystem_path_component(to_href):
  865. raise UnsafePathError(to_href)
  866. os.replace(
  867. path_to_filesystem(item.collection._filesystem_path, item.href),
  868. path_to_filesystem(to_collection._filesystem_path, to_href))
  869. cls._sync_directory(to_collection._filesystem_path)
  870. if item.collection._filesystem_path != to_collection._filesystem_path:
  871. cls._sync_directory(item.collection._filesystem_path)
  872. # Move the item cache entry
  873. cache_folder = os.path.join(item.collection._filesystem_path,
  874. ".Radicale.cache", "item")
  875. to_cache_folder = os.path.join(to_collection._filesystem_path,
  876. ".Radicale.cache", "item")
  877. cls._makedirs_synced(to_cache_folder)
  878. try:
  879. os.replace(os.path.join(cache_folder, item.href),
  880. os.path.join(to_cache_folder, to_href))
  881. except FileNotFoundError:
  882. pass
  883. else:
  884. cls._makedirs_synced(to_cache_folder)
  885. if cache_folder != to_cache_folder:
  886. cls._makedirs_synced(cache_folder)
  887. # Track the change
  888. to_collection._update_history_etag(to_href, item)
  889. item.collection._update_history_etag(item.href, None)
  890. to_collection._clean_history_cache()
  891. if item.collection._filesystem_path != to_collection._filesystem_path:
  892. item.collection._clean_history_cache()
  893. @classmethod
  894. def _clean_cache(cls, folder, names, max_age=None):
  895. """Delete all ``names`` in ``folder`` that are older than ``max_age``.
  896. """
  897. age_limit = time.time() - max_age if max_age is not None else None
  898. modified = False
  899. for name in names:
  900. if not is_safe_filesystem_path_component(name):
  901. continue
  902. if age_limit is not None:
  903. try:
  904. # Race: Another process might have deleted the file.
  905. mtime = os.path.getmtime(os.path.join(folder, name))
  906. except FileNotFoundError:
  907. continue
  908. if mtime > age_limit:
  909. continue
  910. cls.logger.debug("Found expired item in cache: %r", name)
  911. # Race: Another process might have deleted or locked the
  912. # file.
  913. try:
  914. os.remove(os.path.join(folder, name))
  915. except (FileNotFoundError, PermissionError):
  916. continue
  917. modified = True
  918. if modified:
  919. cls._sync_directory(folder)
    def _update_history_etag(self, href, item):
        """Updates and retrieves the history etag from the history cache.

        The history cache contains a file for each current and deleted item
        of the collection. These files contain the etag of the item (empty
        string for deleted items) and a history etag, which is a hash over
        the previous history etag and the etag separated by "/".
        """
        history_folder = os.path.join(self._filesystem_path,
                                      ".Radicale.cache", "history")
        try:
            with open(os.path.join(history_folder, href), "rb") as f:
                cache_etag, history_etag = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError, ValueError) as e:
            # A missing entry is normal; a damaged one is logged
            if isinstance(e, (pickle.UnpicklingError, ValueError)):
                self.logger.warning(
                    "Failed to load history cache entry %r in %r: %s",
                    href, self.path, e, exc_info=True)
            cache_etag = ""
            # Initialize with random data to prevent collisions with cleaned
            # expired items.
            history_etag = binascii.hexlify(os.urandom(16)).decode("ascii")
        etag = item.etag if item else ""
        if etag != cache_etag:
            # The item changed: chain a new history etag and persist it
            self._makedirs_synced(history_folder)
            history_etag = get_etag(history_etag + "/" + etag).strip("\"")
            try:
                # Race: Other processes might have created and locked the file.
                with self._atomic_write(os.path.join(history_folder, href),
                                        "wb") as f:
                    pickle.dump([etag, history_etag], f)
            except PermissionError:
                pass
        return history_etag
  953. def _get_deleted_history_hrefs(self):
  954. """Returns the hrefs of all deleted items that are still in the
  955. history cache."""
  956. history_folder = os.path.join(self._filesystem_path,
  957. ".Radicale.cache", "history")
  958. try:
  959. for href in scandir(history_folder):
  960. if not is_safe_filesystem_path_component(href):
  961. continue
  962. if os.path.isfile(os.path.join(self._filesystem_path, href)):
  963. continue
  964. yield href
  965. except FileNotFoundError:
  966. pass
  967. def _clean_history_cache(self):
  968. # Delete all expired cache entries of deleted items.
  969. history_folder = os.path.join(self._filesystem_path,
  970. ".Radicale.cache", "history")
  971. self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
  972. max_age=self.configuration.getint(
  973. "storage", "max_sync_token_age"))
  974. def sync(self, old_token=None):
  975. # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
  976. # where TOKEN_NAME is the md5 hash of all history etags of present and
  977. # past items of the collection.
  978. def check_token_name(token_name):
  979. if len(token_name) != 32:
  980. return False
  981. for c in token_name:
  982. if c not in "0123456789abcdef":
  983. return False
  984. return True
  985. old_token_name = None
  986. if old_token:
  987. # Extract the token name from the sync token
  988. if not old_token.startswith("http://radicale.org/ns/sync/"):
  989. raise ValueError("Malformed token: %r" % old_token)
  990. old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
  991. if not check_token_name(old_token_name):
  992. raise ValueError("Malformed token: %r" % old_token)
  993. # Get the current state and sync-token of the collection.
  994. state = {}
  995. token_name_hash = md5()
  996. # Find the history of all existing and deleted items
  997. for href, item in chain(
  998. ((item.href, item) for item in self.get_all()),
  999. ((href, None) for href in self._get_deleted_history_hrefs())):
  1000. history_etag = self._update_history_etag(href, item)
  1001. state[href] = history_etag
  1002. token_name_hash.update((href + "/" + history_etag).encode("utf-8"))
  1003. token_name = token_name_hash.hexdigest()
  1004. token = "http://radicale.org/ns/sync/%s" % token_name
  1005. if token_name == old_token_name:
  1006. # Nothing changed
  1007. return token, ()
  1008. token_folder = os.path.join(self._filesystem_path,
  1009. ".Radicale.cache", "sync-token")
  1010. token_path = os.path.join(token_folder, token_name)
  1011. old_state = {}
  1012. if old_token_name:
  1013. # load the old token state
  1014. old_token_path = os.path.join(token_folder, old_token_name)
  1015. try:
  1016. # Race: Another process might have deleted the file.
  1017. with open(old_token_path, "rb") as f:
  1018. old_state = pickle.load(f)
  1019. except (FileNotFoundError, pickle.UnpicklingError,
  1020. ValueError) as e:
  1021. if isinstance(e, (pickle.UnpicklingError, ValueError)):
  1022. self.logger.warning(
  1023. "Failed to load stored sync token %r in %r: %s",
  1024. old_token_name, self.path, e, exc_info=True)
  1025. # Delete the damaged file
  1026. try:
  1027. os.remove(old_token_path)
  1028. except (FileNotFoundError, PermissionError):
  1029. pass
  1030. raise ValueError("Token not found: %r" % old_token)
  1031. # write the new token state or update the modification time of
  1032. # existing token state
  1033. if not os.path.exists(token_path):
  1034. self._makedirs_synced(token_folder)
  1035. try:
  1036. # Race: Other processes might have created and locked the file.
  1037. with self._atomic_write(token_path, "wb") as f:
  1038. pickle.dump(state, f)
  1039. except PermissionError:
  1040. pass
  1041. else:
  1042. # clean up old sync tokens and item cache
  1043. self._clean_cache(token_folder, os.listdir(token_folder),
  1044. max_age=self.configuration.getint(
  1045. "storage", "max_sync_token_age"))
  1046. self._clean_history_cache()
  1047. else:
  1048. # Try to update the modification time
  1049. try:
  1050. # Race: Another process might have deleted the file.
  1051. os.utime(token_path)
  1052. except FileNotFoundError:
  1053. pass
  1054. changes = []
  1055. # Find all new, changed and deleted (that are still in the item cache)
  1056. # items
  1057. for href, history_etag in state.items():
  1058. if history_etag != old_state.get(href):
  1059. changes.append(href)
  1060. # Find all deleted items that are no longer in the item cache
  1061. for href, history_etag in old_state.items():
  1062. if href not in state:
  1063. changes.append(href)
  1064. return token, changes
    def list(self):
        """Iterate over the hrefs of all items in the collection."""
        for href in scandir(self._filesystem_path, only_files=True):
            if not is_safe_filesystem_path_component(href):
                # Internal ".Radicale*" files are expected and not logged
                if not href.startswith(".Radicale"):
                    self.logger.debug(
                        "Skipping item %r in %r", href, self.path)
                continue
            yield href
  1073. def get(self, href, verify_href=True):
  1074. item, metadata = self._get_with_metadata(href, verify_href=verify_href)
  1075. return item
  1076. def _item_cache_hash(self, raw_text):
  1077. _hash = md5()
  1078. _hash.update(left_encode_int(ITEM_CACHE_VERSION))
  1079. _hash.update(raw_text)
  1080. return _hash.hexdigest()
    def _item_cache_content(self, href, vobject_item, cache_hash=None):
        """Build the tuple stored in the item cache:
        ``(cache_hash, uid, etag, text, name, tag, start, end)``."""
        text = vobject_item.serialize()
        if cache_hash is None:
            cache_hash = self._item_cache_hash(text.encode(self._encoding))
        etag = get_etag(text)
        uid = get_uid_from_object(vobject_item)
        name = vobject_item.name
        # Component tag and time range, used for report prefiltering
        tag, start, end = xmlutils.find_tag_and_time_range(vobject_item)
        return cache_hash, uid, etag, text, name, tag, start, end
    def _store_item_cache(self, href, vobject_item, cache_hash=None):
        """Serialize ``vobject_item`` into the item cache and return the
        stored content tuple (see ``_item_cache_content``)."""
        cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                    "item")
        content = self._item_cache_content(href, vobject_item, cache_hash)
        self._makedirs_synced(cache_folder)
        try:
            # Race: Other processes might have created and locked the
            # file.
            with self._atomic_write(os.path.join(cache_folder, href),
                                    "wb") as f:
                pickle.dump(content, f)
        except PermissionError:
            pass
        return content
    @contextmanager
    def _acquire_cache_lock(self, ns=""):
        """Hold a write lock for the cache namespace ``ns`` of this
        collection, lazily creating the per-collection lock object (and,
        when filesystem locking is enabled, its lock file)."""
        if "/" in ns:
            raise ValueError("ns must not include '/'")
        with contextlib.ExitStack() as lock_stack:
            with contextlib.ExitStack() as locks_lock_stack:
                # Serialize access to the shared registry of cache locks
                locks_lock_stack.enter_context(self._cache_locks_lock)
                lock_id = ns + "/" + self.path
                lock = self._cache_locks.get(lock_id)
                if not lock:
                    cache_folder = os.path.join(self._filesystem_path,
                                                ".Radicale.cache")
                    self._makedirs_synced(cache_folder)
                    lock_path = None
                    if self.configuration.getboolean(
                            "storage", "filesystem_locking"):
                        lock_path = os.path.join(
                            cache_folder,
                            ".Radicale.lock" + (".%s" % ns if ns else ""))
                    lock = FileBackedRwLock(lock_path)
                    self._cache_locks[lock_id] = lock
                # The sync_callback releases the registry lock as soon as
                # the acquisition starts, so other collections aren't blocked
                lock_stack.enter_context(lock.acquire_lock(
                    "w", lambda: locks_lock_stack.pop_all().close()))
            try:
                yield
            finally:
                with self._cache_locks_lock:
                    lock_stack.pop_all().close()
                    # Drop the lock from the registry when nobody uses it
                    if not lock.in_use():
                        del self._cache_locks[lock_id]
  1134. def _load_item_cache(self, href, input_hash):
  1135. cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
  1136. "item")
  1137. cache_hash = uid = etag = text = name = tag = start = end = None
  1138. try:
  1139. with open(os.path.join(cache_folder, href), "rb") as f:
  1140. cache_hash, *content = pickle.load(f)
  1141. if cache_hash == input_hash:
  1142. uid, etag, text, name, tag, start, end = content
  1143. except FileNotFoundError as e:
  1144. pass
  1145. except (pickle.UnpicklingError, ValueError) as e:
  1146. self.logger.warning(
  1147. "Failed to load item cache entry %r in %r: %s",
  1148. href, self.path, e, exc_info=True)
  1149. return cache_hash, uid, etag, text, name, tag, start, end
  1150. def _clean_item_cache(self):
  1151. cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
  1152. "item")
  1153. self._clean_cache(cache_folder, (
  1154. href for href in scandir(cache_folder) if not
  1155. os.path.isfile(os.path.join(self._filesystem_path, href))))
    def _get_with_metadata(self, href, verify_href=True):
        """Like ``get`` but additionally returns the following metadata:
        tag, start, end: see ``xmlutils.find_tag_and_time_range``. If
        extraction of the metadata failed, the values are all ``None``."""
        if verify_href:
            try:
                if not is_safe_filesystem_path_component(href):
                    raise UnsafePathError(href)
                path = path_to_filesystem(self._filesystem_path, href)
            except ValueError as e:
                self.logger.debug(
                    "Can't translate name %r safely to filesystem in %r: %s",
                    href, self.path, e, exc_info=True)
                return None, None
        else:
            # Caller guarantees the href is safe (e.g. from ``list``)
            path = os.path.join(self._filesystem_path, href)
        try:
            with open(path, "rb") as f:
                raw_text = f.read()
        except (FileNotFoundError, IsADirectoryError):
            return None, None
        except PermissionError:
            # Windows raises ``PermissionError`` when ``path`` is a directory
            if (os.name == "nt" and
                    os.path.isdir(path) and os.access(path, os.R_OK)):
                return None, None
            raise
        # The hash of the component in the file system. This is used to check,
        # if the entry in the cache is still valid.
        input_hash = self._item_cache_hash(raw_text)
        cache_hash, uid, etag, text, name, tag, start, end = \
            self._load_item_cache(href, input_hash)
        vobject_item = None
        if input_hash != cache_hash:
            with contextlib.ExitStack() as lock_stack:
                # Lock the item cache to prevent multiple processes from
                # generating the same data in parallel.
                # This improves the performance for multiple requests.
                if self._lock.locked() == "r":
                    lock_stack.enter_context(self._acquire_cache_lock("item"))
                    # Check if another process created the file in the
                    # meantime
                    cache_hash, uid, etag, text, name, tag, start, end = \
                        self._load_item_cache(href, input_hash)
                if input_hash != cache_hash:
                    try:
                        # Parse the raw item and regenerate the cache entry
                        vobject_items = tuple(vobject.readComponents(
                            raw_text.decode(self._encoding)))
                        if len(vobject_items) != 1:
                            raise RuntimeError("Content contains %d components"
                                               % len(vobject_items))
                        vobject_item = vobject_items[0]
                        check_and_sanitize_item(vobject_item, uid=uid,
                                                tag=self.get_meta("tag"))
                        cache_hash, uid, etag, text, name, tag, start, end = \
                            self._store_item_cache(
                                href, vobject_item, input_hash)
                    except Exception as e:
                        raise RuntimeError("Failed to load item %r in %r: %s" %
                                           (href, self.path, e)) from e
                # Clean cache entries once after the data in the file
                # system was edited externally.
                if not self._item_cache_cleaned:
                    self._item_cache_cleaned = True
                    self._clean_item_cache()
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.path.getmtime(path)))
        return Item(
            self, href=href, last_modified=last_modified, etag=etag,
            text=text, item=vobject_item, uid=uid, name=name,
            component_name=tag), (tag, start, end)
    def get_multi2(self, hrefs):
        """Iterate ``(href, item)`` pairs for ``hrefs``; ``item`` is ``None``
        for unsafe or unresolvable names."""
        # It's faster to check for file name collisions here, because
        # we only need to call os.listdir once.
        files = None
        for href in hrefs:
            if files is None:
                # List dir after hrefs returned one item, the iterator may be
                # empty and the for-loop is never executed.
                files = os.listdir(self._filesystem_path)
            path = os.path.join(self._filesystem_path, href)
            if (not is_safe_filesystem_path_component(href) or
                    href not in files and os.path.lexists(path)):
                self.logger.debug(
                    "Can't translate name safely to filesystem: %r", href)
                yield (href, None)
            else:
                yield (href, self.get(href, verify_href=False))
  1244. def get_all(self):
  1245. # We don't need to check for collissions, because the the file names
  1246. # are from os.listdir.
  1247. return (self.get(href, verify_href=False) for href in self.list())
  1248. def get_all_filtered(self, filters):
  1249. tag, start, end, simple = xmlutils.simplify_prefilters(
  1250. filters, collection_tag=self.get_meta("tag"))
  1251. if not tag:
  1252. # no filter
  1253. yield from ((item, simple) for item in self.get_all())
  1254. return
  1255. for item, (itag, istart, iend) in (
  1256. self._get_with_metadata(href, verify_href=False)
  1257. for href in self.list()):
  1258. if tag == itag and istart < end and iend > start:
  1259. yield item, simple and (start <= istart or iend <= end)
    def upload(self, href, vobject_item):
        """Store ``vobject_item`` at ``href`` and return the new ``Item``.

        Raises ``UnsafePathError`` for unsafe hrefs and ``ValueError`` when
        the item can't be serialized and cached.
        """
        if not is_safe_filesystem_path_component(href):
            raise UnsafePathError(href)
        try:
            # Fill the item cache first; this also serializes the item
            cache_hash, uid, etag, text, name, tag, _, _ = \
                self._store_item_cache(href, vobject_item)
        except Exception as e:
            raise ValueError("Failed to store item %r in collection %r: %s" %
                             (href, self.path, e)) from e
        path = path_to_filesystem(self._filesystem_path, href)
        with self._atomic_write(path, newline="") as fd:
            fd.write(text)
        # Clean the cache after the actual item is stored, or the cache entry
        # will be removed again.
        self._clean_item_cache()
        item = Item(self, href=href, etag=etag, text=text, item=vobject_item,
                    uid=uid, name=name, component_name=tag)
        # Track the change
        self._update_history_etag(href, item)
        self._clean_history_cache()
        return item
    def delete(self, href=None):
        """Delete the item ``href``, or the whole collection when ``href``
        is ``None``.

        Raises ``UnsafePathError`` for unsafe hrefs and
        ``ComponentNotFoundError`` for missing items.
        """
        if href is None:
            # Delete the collection
            parent_dir = os.path.dirname(self._filesystem_path)
            try:
                os.rmdir(self._filesystem_path)
            except OSError:
                # Folder is not empty: move it into a temporary directory
                # so its removal appears atomic
                with TemporaryDirectory(
                        prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
                    os.rename(self._filesystem_path, os.path.join(
                        tmp, os.path.basename(self._filesystem_path)))
                    self._sync_directory(parent_dir)
            else:
                self._sync_directory(parent_dir)
        else:
            # Delete an item
            if not is_safe_filesystem_path_component(href):
                raise UnsafePathError(href)
            path = path_to_filesystem(self._filesystem_path, href)
            if not os.path.isfile(path):
                raise ComponentNotFoundError(href)
            os.remove(path)
            self._sync_directory(os.path.dirname(path))
            # Track the change
            self._update_history_etag(href, None)
            self._clean_history_cache()
    def get_meta(self, key=None):
        """Return the collection property ``key``, or the whole property
        dict when ``key`` is ``None``."""
        # reuse cached value if the storage is read-only
        if self._lock.locked() == "w" or self._meta_cache is None:
            try:
                try:
                    with open(self._props_path, encoding=self._encoding) as f:
                        self._meta_cache = json.load(f)
                except FileNotFoundError:
                    # A missing properties file means empty properties
                    self._meta_cache = {}
                check_and_sanitize_props(self._meta_cache)
            except ValueError as e:
                raise RuntimeError("Failed to load properties of collection "
                                   "%r: %s" % (self.path, e)) from e
        return self._meta_cache.get(key) if key else self._meta_cache
  1321. def set_meta_all(self, props):
  1322. with self._atomic_write(self._props_path, "w") as f:
  1323. json.dump(props, f, sort_keys=True)
  1324. @property
  1325. def last_modified(self):
  1326. relevant_files = chain(
  1327. (self._filesystem_path,),
  1328. (self._props_path,) if os.path.exists(self._props_path) else (),
  1329. (os.path.join(self._filesystem_path, h) for h in self.list()))
  1330. last = max(map(os.path.getmtime, relevant_files))
  1331. return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
    @property
    def etag(self):
        # reuse cached value if the storage is read-only
        if self._lock.locked() == "w" or self._etag_cache is None:
            # Recompute via the base class (implementation not visible here)
            self._etag_cache = super().etag
        return self._etag_cache
    @classmethod
    @contextmanager
    def acquire_lock(cls, mode, user=None):
        """Hold the global storage lock in ``mode`` ("r" or "w"); after a
        write operation, run the configured storage hook.

        Raises ``subprocess.CalledProcessError`` when the hook fails.
        """
        with cls._lock.acquire_lock(mode):
            yield
            # execute hook
            hook = cls.configuration.get("storage", "hook")
            if mode == "w" and hook:
                folder = os.path.expanduser(cls.configuration.get(
                    "storage", "filesystem_folder"))
                cls.logger.debug("Running hook")
                debug = cls.logger.isEnabledFor(logging.DEBUG)
                # SECURITY: the hook command runs through the shell; the
                # user name is shlex-quoted, the command itself comes from
                # the (trusted) configuration file.
                p = subprocess.Popen(
                    hook % {"user": shlex.quote(user or "Anonymous")},
                    stdin=subprocess.DEVNULL,
                    stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
                    stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
                    shell=True, universal_newlines=True, cwd=folder)
                stdout_data, stderr_data = p.communicate()
                if stdout_data:
                    cls.logger.debug("Captured stdout hook:\n%s", stdout_data)
                if stderr_data:
                    cls.logger.debug("Captured stderr hook:\n%s", stderr_data)
                if p.returncode != 0:
                    raise subprocess.CalledProcessError(p.returncode, p.args)
class FileBackedRwLock:
    """A readers-Writer lock that can additionally lock a file.

    All requests are processed in FIFO order.
    """

    def __init__(self, path=None, close_lock_file=True):
        """Initialize a lock.

        ``path`` the file that is used for locking (optional)

        ``close_lock_file`` close the lock file, when unlocked and no
        requests are pending
        """
        self._path = path
        self._close_lock_file = close_lock_file
        # Guards all mutable state below
        self._lock = threading.Lock()
        # FIFO queue of waiting acquirers (threading.Condition objects)
        self._waiters = []
        # Lazily opened file object used for inter-process locking
        self._lock_file = None
        self._lock_file_locked = False
        # Current holders: reader count and single-writer flag
        self._readers = 0
        self._writer = False
    def locked(self):
        # Return "w" when a writer holds the lock, "r" when only readers
        # do, or "" when unlocked.
        # NOTE(review): reads shared state without holding self._lock, so
        # the result may be stale under concurrent access — confirm callers
        # tolerate this.
        if self._writer:
            return "w"
        if self._readers:
            return "r"
        return ""
    def in_use(self):
        # True when the lock is currently held or acquisitions are pending.
        with self._lock:
            return self._waiters or self._readers or self._writer
  1390. @contextmanager
  1391. def acquire_lock(self, mode, sync_callback=None):
  1392. def condition():
  1393. if mode == "r":
  1394. return not self._writer
  1395. else:
  1396. return not self._writer and self._readers == 0
  1397. # Use a primitive lock which only works within one process as a
  1398. # precondition for inter-process file-based locking
  1399. with self._lock:
  1400. if sync_callback:
  1401. sync_callback()
  1402. if self._waiters or not condition():
  1403. # Use FIFO for access requests
  1404. waiter = threading.Condition(lock=self._lock)
  1405. self._waiters.append(waiter)
  1406. while True:
  1407. waiter.wait()
  1408. if condition():
  1409. break
  1410. self._waiters.pop(0)
  1411. if mode == "r":
  1412. self._readers += 1
  1413. # Notify additional potential readers
  1414. if self._waiters:
  1415. self._waiters[0].notify()
  1416. else:
  1417. self._writer = True
  1418. if self._path and not self._lock_file_locked:
  1419. if not self._lock_file:
  1420. self._lock_file = open(self._path, "w+")
  1421. if os.name == "nt":
  1422. handle = msvcrt.get_osfhandle(self._lock_file.fileno())
  1423. flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
  1424. overlapped = Overlapped()
  1425. if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
  1426. raise RuntimeError("Locking the storage failed "
  1427. "(can be disabled in the config): "
  1428. "%s" % ctypes.FormatError())
  1429. elif os.name == "posix":
  1430. _cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
  1431. try:
  1432. fcntl.flock(self._lock_file.fileno(), _cmd)
  1433. except OSError as e:
  1434. raise RuntimeError("Locking the storage failed "
  1435. "(can be disabled in the config): "
  1436. "%s" % e) from e
  1437. else:
  1438. raise RuntimeError("Locking the storage failed "
  1439. "(can be disabled in the config): "
  1440. "Unsupported operating system")
  1441. self._lock_file_locked = True
  1442. try:
  1443. yield
  1444. finally:
  1445. with self._lock:
  1446. if mode == "r":
  1447. self._readers -= 1
  1448. else:
  1449. self._writer = False
  1450. if self._lock_file_locked and self._readers == 0:
  1451. if os.name == "nt":
  1452. handle = msvcrt.get_osfhandle(self._lock_file.fileno())
  1453. overlapped = Overlapped()
  1454. if not unlock_file_ex(handle, 0, 1, 0, overlapped):
  1455. raise RuntimeError("Unlocking the storage failed: "
  1456. "%s" % ctypes.FormatError())
  1457. elif os.name == "posix":
  1458. try:
  1459. fcntl.flock(self._lock_file.fileno(),
  1460. fcntl.LOCK_UN)
  1461. except OSError as e:
  1462. raise RuntimeError("Unlocking the storage failed: "
  1463. "%s" % e) from e
  1464. else:
  1465. raise RuntimeError("Unlocking the storage failed: "
  1466. "Unsupported operating system")
  1467. if self._close_lock_file and not self._waiters:
  1468. self._lock_file.close()
  1469. self._lock_file = None
  1470. self._lock_file_locked = False
  1471. if self._waiters:
  1472. self._waiters[0].notify()