# This file is part of Radicale Server - Calendar Server
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  17. """
  18. Storage backends.
  19. This module loads the storage backend, according to the storage configuration.
  20. Default storage uses one folder per collection and one file per collection
  21. entry.
  22. """

import contextlib
import datetime
import errno
import json
import os
import posixpath
import shlex
import stat
import subprocess
import threading
import time
from contextlib import contextmanager
from hashlib import md5
from importlib import import_module
from itertools import groupby
from random import getrandbits
from tempfile import NamedTemporaryFile, TemporaryDirectory

import vobject

if os.name == "nt":
    import ctypes
    import ctypes.wintypes
    import msvcrt

    LOCKFILE_EXCLUSIVE_LOCK = 2
    if ctypes.sizeof(ctypes.c_void_p) == 4:
        ULONG_PTR = ctypes.c_uint32
    else:
        ULONG_PTR = ctypes.c_uint64

    class Overlapped(ctypes.Structure):
        _fields_ = [
            ("internal", ULONG_PTR),
            ("internal_high", ULONG_PTR),
            ("offset", ctypes.wintypes.DWORD),
            ("offset_high", ctypes.wintypes.DWORD),
            ("h_event", ctypes.wintypes.HANDLE)]

    lock_file_ex = ctypes.windll.kernel32.LockFileEx
    lock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    lock_file_ex.restype = ctypes.wintypes.BOOL
    unlock_file_ex = ctypes.windll.kernel32.UnlockFileEx
    unlock_file_ex.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.POINTER(Overlapped)]
    unlock_file_ex.restype = ctypes.wintypes.BOOL
elif os.name == "posix":
    import fcntl
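
# On Windows the kernel32 functions LockFileEx/UnlockFileEx are wrapped with
# ctypes to get the equivalent of fcntl.flock on POSIX: acquire_lock below
# locks a single byte of the ".Radicale.lock" file, and the Overlapped
# structure selects the byte range that gets locked or unlocked.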


def load(configuration, logger):
    """Load the storage manager chosen in configuration."""
    storage_type = configuration.get("storage", "type")
    if storage_type == "multifilesystem":
        collection_class = Collection
    else:
        try:
            collection_class = import_module(storage_type).Collection
        except ImportError as e:
            raise RuntimeError("Storage module %r not found" %
                               storage_type) from e
    logger.info("Storage type is %r", storage_type)

    class CollectionCopy(collection_class):
        """Collection copy, avoids overriding the original class attributes."""
    CollectionCopy.configuration = configuration
    CollectionCopy.logger = logger
    return CollectionCopy


def get_etag(text):
    """Etag from collection or item.

    Encoded as quoted-string (see RFC 2616).

    """
    etag = md5()
    etag.update(text.encode("utf-8"))
    return '"%s"' % etag.hexdigest()
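
# For example, get_etag("") returns '"d41d8cd98f00b204e9800998ecf8427e"':
# the hexadecimal MD5 digest wrapped in double quotes, as HTTP expects for
# ETag values.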


def get_uid(item):
    """UID value of an item if defined."""
    return hasattr(item, "uid") and item.uid.value


def sanitize_path(path):
    """Make path absolute with leading slash to prevent access to other data.

    Preserve a potential trailing slash.

    """
    trailing_slash = "/" if path.endswith("/") else ""
    path = posixpath.normpath(path)
    new_path = "/"
    for part in path.split("/"):
        if not is_safe_path_component(part):
            continue
        new_path = posixpath.join(new_path, part)
    trailing_slash = "" if new_path.endswith("/") else trailing_slash
    return new_path + trailing_slash
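
# For example, sanitize_path("..//user/calendar/") returns "/user/calendar/":
# unsafe components such as ".." and empty parts are dropped, a leading slash
# is enforced and a trailing slash is preserved.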


def is_safe_path_component(path):
    """Check if path is a single component of a path.

    Check that the path is safe to join too.

    """
    return path and "/" not in path and path not in (".", "..")


def is_safe_filesystem_path_component(path):
    """Check if path is a single component of a local and posix filesystem
    path.

    Check that the path is safe to join too.

    """
    return (
        path and not os.path.splitdrive(path)[0] and
        not os.path.split(path)[0] and path not in (os.curdir, os.pardir) and
        not path.startswith(".") and not path.endswith("~") and
        is_safe_path_component(path))
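
# This rejects components such as "..", ".hidden", "backup~" or "a/b", and on
# Windows also anything carrying a drive prefix, so only plain file names that
# cannot escape the collection folder are accepted.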


def path_to_filesystem(root, *paths):
    """Convert path to a local filesystem path relative to base_folder.

    `root` must be a secure filesystem path, it will be prepended to the path.

    Conversion of `paths` is done in a secure manner, or raises ``ValueError``.

    """
    paths = [sanitize_path(path).strip("/") for path in paths]
    safe_path = root
    for path in paths:
        if not path:
            continue
        for part in path.split("/"):
            if not is_safe_filesystem_path_component(part):
                raise UnsafePathError(part)
            safe_path_parent = safe_path
            safe_path = os.path.join(safe_path, part)
            # Check for conflicting files (e.g. case-insensitive file systems
            # or short names on Windows file systems)
            if os.path.lexists(safe_path):
                if part not in os.listdir(safe_path_parent):
                    raise CollidingPathError(part)
    return safe_path
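
# For example, path_to_filesystem("/var/lib/radicale", "user/calendar") joins
# the components one by one into "/var/lib/radicale/user/calendar". It raises
# UnsafePathError for components like ".." and CollidingPathError when an
# existing directory entry matches only because the file system is
# case-insensitive or uses short names.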


class UnsafePathError(ValueError):
    def __init__(self, path):
        message = "Can't translate name safely to filesystem: %r" % path
        super().__init__(message)


class CollidingPathError(ValueError):
    def __init__(self, path):
        message = "File name collision: %r" % path
        super().__init__(message)


class ComponentExistsError(ValueError):
    def __init__(self, path):
        message = "Component already exists: %r" % path
        super().__init__(message)


class ComponentNotFoundError(ValueError):
    def __init__(self, path):
        message = "Component doesn't exist: %r" % path
        super().__init__(message)


class Item:
    def __init__(self, collection, item, href, last_modified=None):
        self.collection = collection
        self.item = item
        self.href = href
        self.last_modified = last_modified

    def __getattr__(self, attr):
        return getattr(self.item, attr)

    @property
    def etag(self):
        """Encoded as quoted-string (see RFC 2616)."""
        return get_etag(self.serialize())


class BaseCollection:

    # Overridden on copy by the "load" function
    configuration = None
    logger = None

    def __init__(self, path, principal=False):
        """Initialize the collection.

        ``path`` must be the normalized relative path of the collection,
        using the slash as the folder delimiter, with no leading nor trailing
        slash.

        """
        raise NotImplementedError

    @classmethod
    def discover(cls, path, depth="0"):
        """Discover a list of collections under the given ``path``.

        If ``depth`` is "0", only the actual object under ``path`` is
        returned.

        If ``depth`` is anything but "0", it is considered as "1" and direct
        children are included in the result.

        The ``path`` is relative.

        The root collection "/" must always exist.

        """
        raise NotImplementedError

    @classmethod
    def move(cls, item, to_collection, to_href):
        """Move an object.

        ``item`` is the item to move.

        ``to_collection`` is the target collection.

        ``to_href`` is the target name in ``to_collection``. An item with the
        same name might already exist.

        """
        if item.collection.path == to_collection.path and item.href == to_href:
            return
        to_collection.upload(to_href, item.item)
        item.collection.delete(item.href)

    @property
    def etag(self):
        """Encoded as quoted-string (see RFC 2616)."""
        return get_etag(self.serialize())

    @classmethod
    def create_collection(cls, href, collection=None, props=None):
        """Create a collection.

        If the collection already exists and neither ``collection`` nor
        ``props`` are set, this method shouldn't do anything. Otherwise the
        existing collection must be replaced.

        ``collection`` is a list of vobject components.

        ``props`` are metadata values for the collection.

        ``props["tag"]`` is the type of collection (VCALENDAR or
        VADDRESSBOOK). If the key ``tag`` is missing, it is guessed from the
        collection.

        """
        raise NotImplementedError

    def sync(self, old_token=None):
        """Get the current sync token and changed items for synchronization.

        ``old_token`` is an old sync token which is used as the base of the
        delta update. If the sync token is missing, all items are returned.

        ValueError is raised for invalid or old tokens.

        WARNING: This simple default implementation treats all sync tokens as
                 invalid. It adheres to the specification but some clients
                 (e.g. InfCloud) don't like it. Subclasses should provide a
                 more sophisticated implementation.

        """
        token = "http://radicale.org/ns/sync/%s" % self.etag.strip("\"")
        if old_token:
            raise ValueError("Sync tokens are not supported")
        return token, self.list()

    def list(self):
        """List collection items."""
        raise NotImplementedError

    def get(self, href):
        """Fetch a single item."""
        raise NotImplementedError

    def get_multi(self, hrefs):
        """Fetch multiple items. Duplicate hrefs must be ignored.

        Functionally similar to ``get``, but might bring performance benefits
        on some storages when used cleverly.

        """
        for href in set(hrefs):
            yield self.get(href)

    def pre_filtered_list(self, filters):
        """List collection items with optional pre-filtering.

        Depending on ``filters`` and the implementation, this can largely
        improve the performance of reports. This default implementation
        returns all items.

        """
        return [self.get(href) for href in self.list()]

    def has(self, href):
        """Check if an item exists by its href.

        Functionally similar to ``get``, but might bring performance benefits
        on some storages when used cleverly.

        """
        return self.get(href) is not None

    def upload(self, href, vobject_item):
        """Upload a new or replace an existing item."""
        raise NotImplementedError

    def delete(self, href=None):
        """Delete an item.

        When ``href`` is ``None``, delete the collection.

        """
        raise NotImplementedError

    def get_meta(self, key):
        """Get metadata value for collection."""
        raise NotImplementedError

    def set_meta(self, props):
        """Set metadata values for collection."""
        raise NotImplementedError

    @property
    def last_modified(self):
        """Get the HTTP-datetime of when the collection was modified."""
        raise NotImplementedError

    def serialize(self):
        """Get the unicode string representing the whole collection."""
        raise NotImplementedError

    @classmethod
    @contextmanager
    def acquire_lock(cls, mode, user=None):
        """Set a context manager to lock the whole storage.

        ``mode`` must either be "r" for shared access or "w" for exclusive
        access.

        ``user`` is the name of the logged in user or empty.

        """
        raise NotImplementedError
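
# "multifilesystem" maps to the Collection class below. Any other value of the
# [storage] type option is imported as a module by load() and must expose a
# Collection class implementing the BaseCollection interface above, roughly
# (a hypothetical plugin module):
#
#     from radicale import storage
#
#     class Collection(storage.BaseCollection):
#         ...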


class Collection(BaseCollection):
    """Collection stored in several files per calendar."""

    def __init__(self, path, principal=False, folder=None):
        if not folder:
            folder = self._get_collection_root_folder()
        # Path should already be sanitized
        self.path = sanitize_path(path).strip("/")
        self.encoding = self.configuration.get("encoding", "stock")
        self._filesystem_path = path_to_filesystem(folder, self.path)
        self._props_path = os.path.join(
            self._filesystem_path, ".Radicale.props")
        split_path = self.path.split("/")
        self.owner = split_path[0] if len(split_path) > 1 else None
        self.is_principal = principal

    @classmethod
    def _get_collection_root_folder(cls):
        filesystem_folder = os.path.expanduser(
            cls.configuration.get("storage", "filesystem_folder"))
        return os.path.join(filesystem_folder, "collection-root")

    @contextmanager
    def _atomic_write(self, path, mode="w", newline=None):
        directory = os.path.dirname(path)
        tmp = NamedTemporaryFile(
            mode=mode, dir=directory, delete=False, prefix=".Radicale.tmp-",
            newline=newline, encoding=None if "b" in mode else self.encoding)
        try:
            yield tmp
            self._fsync(tmp.fileno())
            tmp.close()
            os.replace(tmp.name, path)
        except:
            tmp.close()
            os.remove(tmp.name)
            raise
        self._sync_directory(directory)
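
    # _atomic_write implements the usual replace-by-rename pattern: the data
    # is written to a NamedTemporaryFile in the target directory, fsync'ed
    # and closed, then os.replace() atomically swaps it into place, and the
    # containing directory is synced so that the rename itself survives a
    # crash (all syncing depends on the "filesystem_fsync" option).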

    @staticmethod
    def _find_available_file_name(exists_fn):
        # Prevent infinite loop
        for _ in range(10000):
            file_name = hex(getrandbits(32))[2:]
            if not exists_fn(file_name):
                return file_name
        raise FileExistsError(errno.EEXIST, "No usable file name found")

    @classmethod
    def _fsync(cls, fd):
        if cls.configuration.getboolean("storage", "filesystem_fsync"):
            if os.name == "posix" and hasattr(fcntl, "F_FULLFSYNC"):
                fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
            else:
                os.fsync(fd)

    @classmethod
    def _sync_directory(cls, path):
        """Sync directory to disk.

        This only works on POSIX and does nothing on other systems.

        """
        if not cls.configuration.getboolean("storage", "filesystem_fsync"):
            return
        if os.name == "posix":
            fd = os.open(path, 0)
            try:
                cls._fsync(fd)
            finally:
                os.close(fd)

    @classmethod
    def _makedirs_synced(cls, filesystem_path):
        """Recursively create a directory and its parents in a sync'ed way.

        This method acts silently when the folder already exists.

        """
        if os.path.isdir(filesystem_path):
            return
        parent_filesystem_path = os.path.dirname(filesystem_path)
        # Prevent infinite loop
        if filesystem_path != parent_filesystem_path:
            # Create parent dirs recursively
            cls._makedirs_synced(parent_filesystem_path)
        # Possible race!
        os.makedirs(filesystem_path, exist_ok=True)
        cls._sync_directory(parent_filesystem_path)

    @classmethod
    def discover(cls, path, depth="0"):
        # Path should already be sanitized
        sane_path = sanitize_path(path).strip("/")
        attributes = sane_path.split("/")
        if not attributes[0]:
            attributes.pop()
        folder = cls._get_collection_root_folder()
        # Create the root collection
        cls._makedirs_synced(folder)
        try:
            filesystem_path = path_to_filesystem(folder, sane_path)
        except ValueError as e:
            # Path is unsafe
            cls.logger.debug("Collection with unsafe path %r requested: %s",
                             sane_path, e, exc_info=True)
            return
        # Check if the path exists and if it leads to a collection or an item
        if not os.path.isdir(filesystem_path):
            if attributes and os.path.isfile(filesystem_path):
                href = attributes.pop()
            else:
                return
        else:
            href = None
        path = "/".join(attributes)
        principal = len(attributes) == 1
        collection = cls(path, principal)
        if href:
            yield collection.get(href)
            return
        yield collection
        if depth == "0":
            return
        for item in collection.list():
            yield collection.get(item)
        for href in os.listdir(filesystem_path):
            if not is_safe_filesystem_path_component(href):
                if not href.startswith(".Radicale"):
                    cls.logger.debug("Skipping collection %r in %r", href,
                                     path)
                continue
            child_filesystem_path = path_to_filesystem(filesystem_path, href)
            if os.path.isdir(child_filesystem_path):
                child_path = posixpath.join(path, href)
                child_principal = len(attributes) == 0
                yield cls(child_path, child_principal)
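
    # discover() yields, in order: the collection itself (or the single item
    # when ``path`` points to a file), then, for any depth other than "0",
    # every item of the collection followed by its direct child collections.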

    @classmethod
    def create_collection(cls, href, collection=None, props=None):
        folder = cls._get_collection_root_folder()
        # Path should already be sanitized
        sane_path = sanitize_path(href).strip("/")
        attributes = sane_path.split("/")
        if not attributes[0]:
            attributes.pop()
        principal = len(attributes) == 1
        filesystem_path = path_to_filesystem(folder, sane_path)
        if not props:
            props = {}
        if not props.get("tag") and collection:
            props["tag"] = collection[0].name
        if not props:
            cls._makedirs_synced(filesystem_path)
            return cls(sane_path, principal=principal)
        parent_dir = os.path.dirname(filesystem_path)
        cls._makedirs_synced(parent_dir)
        # Create a temporary directory with an unsafe name
        with TemporaryDirectory(
                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
            # The temporary directory itself can't be renamed
            tmp_filesystem_path = os.path.join(tmp_dir, "collection")
            os.makedirs(tmp_filesystem_path)
            self = cls("/", principal=principal, folder=tmp_filesystem_path)
            self.set_meta(props)
            if collection:
                if props.get("tag") == "VCALENDAR":
                    collection, = collection
                    items = []
                    for content in ("vevent", "vtodo", "vjournal"):
                        items.extend(
                            getattr(collection, "%s_list" % content, []))
                    items_by_uid = groupby(sorted(items, key=get_uid), get_uid)
                    vobject_items = {}
                    for uid, items in items_by_uid:
                        new_collection = vobject.iCalendar()
                        for item in items:
                            new_collection.add(item)
                        href = self._find_available_file_name(
                            vobject_items.get)
                        vobject_items[href] = new_collection
                    self.upload_all_nonatomic(vobject_items)
                elif props.get("tag") == "VCARD":
                    vobject_items = {}
                    for card in collection:
                        href = self._find_available_file_name(
                            vobject_items.get)
                        vobject_items[href] = card
                    self.upload_all_nonatomic(vobject_items)
            # This operation is not atomic on the filesystem level but it's
            # very unlikely that one rename operation succeeds while the
            # other fails or that only one gets written to disk.
            if os.path.exists(filesystem_path):
                os.rename(filesystem_path, os.path.join(tmp_dir, "delete"))
            os.rename(tmp_filesystem_path, filesystem_path)
            cls._sync_directory(parent_dir)
        return cls(sane_path, principal=principal)
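
    # create_collection() assembles the new collection in a temporary
    # directory first: the props file is written, VCALENDAR input is split
    # into one file per UID (VCARD input into one file per card), and the
    # finished directory is then swapped into place with two renames.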

    def upload_all_nonatomic(self, vobject_items):
        """Upload a new set of items.

        This takes a mapping of href and vobject items and uploads them
        non-atomically and without existence checks.

        """
        with contextlib.ExitStack() as stack:
            fs = []
            for href, item in vobject_items.items():
                if not is_safe_filesystem_path_component(href):
                    raise UnsafePathError(href)
                path = path_to_filesystem(self._filesystem_path, href)
                fs.append(stack.enter_context(
                    open(path, "w", encoding=self.encoding, newline="")))
                fs[-1].write(item.serialize())
            # sync everything at once because it's slightly faster.
            for f in fs:
                self._fsync(f.fileno())
        self._sync_directory(self._filesystem_path)

    @classmethod
    def move(cls, item, to_collection, to_href):
        if not is_safe_filesystem_path_component(to_href):
            raise UnsafePathError(to_href)
        os.replace(
            path_to_filesystem(item.collection._filesystem_path, item.href),
            path_to_filesystem(to_collection._filesystem_path, to_href))
        cls._sync_directory(to_collection._filesystem_path)
        if item.collection._filesystem_path != to_collection._filesystem_path:
            cls._sync_directory(item.collection._filesystem_path)

    @classmethod
    def _clean_cache(cls, folder, names, max_age=None):
        """Delete all ``names`` in ``folder`` that are older than ``max_age``.
        """
        age_limit = time.time() - max_age if max_age is not None else None
        modified = False
        for name in names:
            if not is_safe_filesystem_path_component(name):
                continue
            if age_limit is not None:
                try:
                    # Race: Another process might have deleted the file.
                    mtime = os.path.getmtime(os.path.join(folder, name))
                except FileNotFoundError:
                    continue
                if mtime > age_limit:
                    continue
            cls.logger.debug("Found expired item in cache: %r", name)
            # Race: Another process might have deleted or locked the
            # file.
            try:
                os.remove(os.path.join(folder, name))
            except (FileNotFoundError, PermissionError):
                continue
            modified = True
        if modified:
            cls._sync_directory(folder)

    def list(self):
        for href in os.listdir(self._filesystem_path):
            if not is_safe_filesystem_path_component(href):
                if not href.startswith(".Radicale"):
                    self.logger.debug(
                        "Skipping item %r in %r", href, self.path)
                continue
            path = os.path.join(self._filesystem_path, href)
            if os.path.isfile(path):
                yield href

    def get(self, href):
        if not href:
            return None
        if not is_safe_filesystem_path_component(href):
            self.logger.debug("Can't translate name %r safely to filesystem "
                              "in %r", href, self.path)
            return None
        path = path_to_filesystem(self._filesystem_path, href)
        if not os.path.isfile(path):
            return None
        with open(path, encoding=self.encoding, newline="") as f:
            text = f.read()
        last_modified = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT",
            time.gmtime(os.path.getmtime(path)))
        try:
            item = vobject.readOne(text)
        except Exception as e:
            raise RuntimeError("Failed to parse item %r in %r" %
                               (href, self.path)) from e
        return Item(self, item, href, last_modified)

    def upload(self, href, vobject_item):
        if not is_safe_filesystem_path_component(href):
            raise UnsafePathError(href)
        path = path_to_filesystem(self._filesystem_path, href)
        item = Item(self, vobject_item, href)
        with self._atomic_write(path, newline="") as fd:
            fd.write(item.serialize())
        return item

    def delete(self, href=None):
        if href is None:
            # Delete the collection
            parent_dir = os.path.dirname(self._filesystem_path)
            try:
                os.rmdir(self._filesystem_path)
            except OSError:
                with TemporaryDirectory(
                        prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
                    os.rename(self._filesystem_path, os.path.join(
                        tmp, os.path.basename(self._filesystem_path)))
                    self._sync_directory(parent_dir)
            else:
                self._sync_directory(parent_dir)
        else:
            # Delete an item
            if not is_safe_filesystem_path_component(href):
                raise UnsafePathError(href)
            path = path_to_filesystem(self._filesystem_path, href)
            if not os.path.isfile(path):
                raise ComponentNotFoundError(href)
            os.remove(path)
            self._sync_directory(os.path.dirname(path))

    def get_meta(self, key=None):
        if os.path.exists(self._props_path):
            with open(self._props_path, encoding=self.encoding) as f:
                try:
                    meta = json.load(f)
                except ValueError as e:
                    raise RuntimeError("Failed to load properties of collect"
                                       "ion %r: %s" % (self.path, e)) from e
                return meta.get(key) if key else meta

    def set_meta(self, props):
        if os.path.exists(self._props_path):
            with open(self._props_path, encoding=self.encoding) as f:
                old_props = json.load(f)
                old_props.update(props)
                props = old_props
        props = {key: value for key, value in props.items() if value}
        with self._atomic_write(self._props_path, "w+") as f:
            json.dump(props, f)

    @property
    def last_modified(self):
        relevant_files = [self._filesystem_path] + [
            path_to_filesystem(self._filesystem_path, href)
            for href in self.list()]
        if os.path.exists(self._props_path):
            relevant_files.append(self._props_path)
        last = max(map(os.path.getmtime, relevant_files))
        return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))

    def serialize(self):
        items = []
        time_begin = datetime.datetime.now()
        for href in self.list():
            items.append(self.get(href).item)
        time_end = datetime.datetime.now()
        self.logger.info(
            "Read %d items in %.3f seconds from %r", len(items),
            (time_end - time_begin).total_seconds(), self.path)
        if self.get_meta("tag") == "VCALENDAR":
            collection = vobject.iCalendar()
            for item in items:
                for content in ("vevent", "vtodo", "vjournal"):
                    if content in item.contents:
                        for item_part in getattr(item, "%s_list" % content):
                            collection.add(item_part)
                        break
            return collection.serialize()
        elif self.get_meta("tag") == "VADDRESSBOOK":
            return "".join([item.serialize() for item in items])
        return ""

    _lock = threading.Lock()
    _waiters = []
    _lock_file = None
    _lock_file_locked = False
    _readers = 0
    _writer = False
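
    # These class attributes implement an in-process readers-writer lock with
    # FIFO fairness: ``_readers`` and ``_writer`` track the current holders,
    # ``_waiters`` keeps one Condition per queued request, and ``_lock_file``
    # carries the optional inter-process lock on ".Radicale.lock".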

    @classmethod
    @contextmanager
    def acquire_lock(cls, mode, user=None):
        def condition():
            if mode == "r":
                return not cls._writer
            else:
                return not cls._writer and cls._readers == 0

        file_locking = cls.configuration.getboolean("storage",
                                                    "filesystem_locking")
        folder = os.path.expanduser(cls.configuration.get(
            "storage", "filesystem_folder"))
        # Use a primitive lock which only works within one process as a
        # precondition for inter-process file-based locking
        with cls._lock:
            if cls._waiters or not condition():
                # Use FIFO for access requests
                waiter = threading.Condition(lock=cls._lock)
                cls._waiters.append(waiter)
                while True:
                    waiter.wait()
                    if condition():
                        break
                cls._waiters.pop(0)
            if mode == "r":
                cls._readers += 1
                # Notify additional potential readers
                if cls._waiters:
                    cls._waiters[0].notify()
            else:
                cls._writer = True
            if not cls._lock_file:
                cls._makedirs_synced(folder)
                lock_path = os.path.join(folder, ".Radicale.lock")
                cls._lock_file = open(lock_path, "w+")
                # Set access rights to a necessary minimum to prevent locking
                # by arbitrary users
                try:
                    os.chmod(lock_path, stat.S_IWUSR | stat.S_IRUSR)
                except OSError as e:
                    cls.logger.info("Failed to set permissions on lock file:"
                                    " %s", e, exc_info=True)
            if file_locking and not cls._lock_file_locked:
                if os.name == "nt":
                    handle = msvcrt.get_osfhandle(cls._lock_file.fileno())
                    flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
                    overlapped = Overlapped()
                    if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
                        raise RuntimeError("Locking the storage failed: %s" %
                                           ctypes.FormatError())
                elif os.name == "posix":
                    _cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
                    try:
                        fcntl.flock(cls._lock_file.fileno(), _cmd)
                    except OSError as e:
                        raise RuntimeError("Locking the storage failed: %s" %
                                           e) from e
                else:
                    raise RuntimeError("Locking the storage failed: "
                                       "Unsupported operating system")
                cls._lock_file_locked = True
        try:
            yield
            # execute hook
            hook = cls.configuration.get("storage", "hook")
            if mode == "w" and hook:
                cls.logger.debug("Running hook")
                subprocess.check_call(
                    hook % {"user": shlex.quote(user or "Anonymous")},
                    shell=True, cwd=folder)
        finally:
            with cls._lock:
                if mode == "r":
                    cls._readers -= 1
                else:
                    cls._writer = False
                if file_locking and cls._readers == 0:
                    if os.name == "nt":
                        handle = msvcrt.get_osfhandle(cls._lock_file.fileno())
                        overlapped = Overlapped()
                        if not unlock_file_ex(handle, 0, 1, 0, overlapped):
                            raise RuntimeError("Unlocking the storage failed: "
                                               "%s" % ctypes.FormatError())
                    elif os.name == "posix":
                        try:
                            fcntl.flock(cls._lock_file.fileno(), fcntl.LOCK_UN)
                        except OSError as e:
                            raise RuntimeError("Unlocking the storage failed: "
                                               "%s" % e) from e
                    else:
                        raise RuntimeError("Unlocking the storage failed: "
                                           "Unsupported operating system")
                    cls._lock_file_locked = False
                if cls._waiters:
                    cls._waiters[0].notify()
                if (cls.configuration.getboolean(
                        "storage", "filesystem_close_lock_file") and
                        cls._readers == 0 and not cls._waiters):
                    cls._lock_file.close()
                    cls._lock_file = None
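
# A rough usage sketch (the collection path and user name are made up; only
# load(), acquire_lock() and discover() come from this module):
#
#     collection_class = load(configuration, logger)
#     with collection_class.acquire_lock("r", user="alice"):
#         for collection in collection_class.discover("/alice/", depth="1"):
#             ...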