Просмотр исходного кода

Type hints for multifilesystem

Unrud 4 года назад
Родитель
Commit
698ae875ce

+ 39 - 107
radicale/storage/multifilesystem/__init__.py

@@ -23,75 +23,57 @@ Uses one folder per collection and one file per collection entry.
 
 """
 
-import contextlib
 import os
 import time
-from itertools import chain
-from tempfile import TemporaryDirectory
+from typing import Iterator, Optional
 
-from radicale import pathutils, storage
-from radicale.storage.multifilesystem.cache import CollectionCacheMixin
+from radicale import config
+from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
 from radicale.storage.multifilesystem.create_collection import \
-    StorageCreateCollectionMixin
-from radicale.storage.multifilesystem.delete import CollectionDeleteMixin
-from radicale.storage.multifilesystem.discover import StorageDiscoverMixin
-from radicale.storage.multifilesystem.get import CollectionGetMixin
-from radicale.storage.multifilesystem.history import CollectionHistoryMixin
-from radicale.storage.multifilesystem.lock import (CollectionLockMixin,
-                                                   StorageLockMixin)
-from radicale.storage.multifilesystem.meta import CollectionMetaMixin
-from radicale.storage.multifilesystem.move import StorageMoveMixin
-from radicale.storage.multifilesystem.sync import CollectionSyncMixin
-from radicale.storage.multifilesystem.upload import CollectionUploadMixin
-from radicale.storage.multifilesystem.verify import StorageVerifyMixin
+    StoragePartCreateCollection
+from radicale.storage.multifilesystem.delete import CollectionPartDelete
+from radicale.storage.multifilesystem.discover import StoragePartDiscover
+from radicale.storage.multifilesystem.get import CollectionPartGet
+from radicale.storage.multifilesystem.history import CollectionPartHistory
+from radicale.storage.multifilesystem.lock import (CollectionPartLock,
+                                                   StoragePartLock)
+from radicale.storage.multifilesystem.meta import CollectionPartMeta
+from radicale.storage.multifilesystem.move import StoragePartMove
+from radicale.storage.multifilesystem.sync import CollectionPartSync
+from radicale.storage.multifilesystem.upload import CollectionPartUpload
+from radicale.storage.multifilesystem.verify import StoragePartVerify
 
 
 class Collection(
-        CollectionCacheMixin, CollectionDeleteMixin, CollectionGetMixin,
-        CollectionHistoryMixin, CollectionLockMixin, CollectionMetaMixin,
-        CollectionSyncMixin, CollectionUploadMixin, storage.BaseCollection):
-
-    def __init__(self, storage_, path, filesystem_path=None):
-        self._storage = storage_
-        folder = self._storage._get_collection_root_folder()
-        # Path should already be sanitized
-        self._path = pathutils.strip_path(path)
-        self._encoding = self._storage.configuration.get("encoding", "stock")
-        if filesystem_path is None:
-            filesystem_path = pathutils.path_to_filesystem(folder, self.path)
-        self._filesystem_path = filesystem_path
+        CollectionPartDelete, CollectionPartMeta, CollectionPartSync,
+        CollectionPartUpload, CollectionPartGet, CollectionPartCache,
+        CollectionPartLock, CollectionPartHistory, CollectionBase):
+
+    _etag_cache: Optional[str]
+
+    def __init__(self, storage_: "Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._etag_cache = None
-        super().__init__()
 
     @property
-    def path(self):
+    def path(self) -> str:
         return self._path
 
-    @contextlib.contextmanager
-    def _atomic_write(self, path, mode="w", newline=None):
-        parent_dir, name = os.path.split(path)
-        # Do not use mkstemp because it creates with permissions 0o600
-        with TemporaryDirectory(
-                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
-            with open(os.path.join(tmp_dir, name), mode, newline=newline,
-                      encoding=None if "b" in mode else self._encoding) as tmp:
-                yield tmp
-                tmp.flush()
-                self._storage._fsync(tmp)
-            os.replace(os.path.join(tmp_dir, name), path)
-        self._storage._sync_directory(parent_dir)
-
     @property
-    def last_modified(self):
-        relevant_files = chain(
-            (self._filesystem_path,),
-            (self._props_path,) if os.path.exists(self._props_path) else (),
-            (os.path.join(self._filesystem_path, h) for h in self._list()))
-        last = max(map(os.path.getmtime, relevant_files))
+    def last_modified(self) -> str:
+        def relevant_files_iter() -> Iterator[str]:
+            yield self._filesystem_path
+            if os.path.exists(self._props_path):
+                yield self._props_path
+            for href in self._list():
+                yield os.path.join(self._filesystem_path, href)
+        last = max(map(os.path.getmtime, relevant_files_iter()))
         return time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(last))
 
     @property
-    def etag(self):
+    def etag(self) -> str:
         # reuse cached value if the storage is read-only
         if self._storage._lock.locked == "w" or self._etag_cache is None:
             self._etag_cache = super().etag
@@ -99,61 +81,11 @@ class Collection(
 
 
 class Storage(
-        StorageCreateCollectionMixin, StorageDiscoverMixin, StorageLockMixin,
-        StorageMoveMixin, StorageVerifyMixin, storage.BaseStorage):
+        StoragePartCreateCollection, StoragePartLock, StoragePartMove,
+        StoragePartVerify, StoragePartDiscover, StorageBase):
 
     _collection_class = Collection
 
-    def __init__(self, configuration):
+    def __init__(self, configuration: config.Configuration) -> None:
         super().__init__(configuration)
-        folder = configuration.get("storage", "filesystem_folder")
-        self._makedirs_synced(folder)
-
-    def _get_collection_root_folder(self):
-        filesystem_folder = self.configuration.get(
-            "storage", "filesystem_folder")
-        return os.path.join(filesystem_folder, "collection-root")
-
-    def _fsync(self, f):
-        if self.configuration.get("storage", "_filesystem_fsync"):
-            try:
-                pathutils.fsync(f.fileno())
-            except OSError as e:
-                raise RuntimeError("Fsync'ing file %r failed: %s" %
-                                   (f.name, e)) from e
-
-    def _sync_directory(self, path):
-        """Sync directory to disk.
-
-        This only works on POSIX and does nothing on other systems.
-
-        """
-        if not self.configuration.get("storage", "_filesystem_fsync"):
-            return
-        if os.name == "posix":
-            try:
-                fd = os.open(path, 0)
-                try:
-                    pathutils.fsync(fd)
-                finally:
-                    os.close(fd)
-            except OSError as e:
-                raise RuntimeError("Fsync'ing directory %r failed: %s" %
-                                   (path, e)) from e
-
-    def _makedirs_synced(self, filesystem_path):
-        """Recursively create a directory and its parents in a sync'ed way.
-
-        This method acts silently when the folder already exists.
-
-        """
-        if os.path.isdir(filesystem_path):
-            return
-        parent_filesystem_path = os.path.dirname(filesystem_path)
-        # Prevent infinite loop
-        if filesystem_path != parent_filesystem_path:
-            # Create parent dirs recursively
-            self._makedirs_synced(parent_filesystem_path)
-        # Possible race!
-        os.makedirs(filesystem_path, exist_ok=True)
-        self._sync_directory(parent_filesystem_path)
+        self._makedirs_synced(self._filesystem_folder)

+ 121 - 0
radicale/storage/multifilesystem/base.py

@@ -0,0 +1,121 @@
+# This file is part of Radicale Server - Calendar Server
+# Copyright © 2014 Jean-Marc Martins
+# Copyright © 2012-2017 Guillaume Ayoub
+# Copyright © 2017-2019 Unrud <unrud@outlook.com>
+#
+# This library is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+from tempfile import TemporaryDirectory
+from typing import IO, AnyStr, Iterator, Optional, Type
+
+from radicale import config, pathutils, storage, types
+from radicale.storage import multifilesystem  # noqa:F401
+
+
+class CollectionBase(storage.BaseCollection):
+
+    _storage: "multifilesystem.Storage"
+    _path: str
+    _encoding: str
+    _filesystem_path: str
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__()
+        self._storage = storage_
+        folder = storage_._get_collection_root_folder()
+        # Path should already be sanitized
+        self._path = pathutils.strip_path(path)
+        self._encoding = storage_.configuration.get("encoding", "stock")
+        if filesystem_path is None:
+            filesystem_path = pathutils.path_to_filesystem(folder, self.path)
+        self._filesystem_path = filesystem_path
+
+    @types.contextmanager
+    def _atomic_write(self, path: str, mode: str = "w",
+                      newline: Optional[str] = None) -> Iterator[IO[AnyStr]]:
+        # TODO: Overload with Literal when dropping support for Python < 3.8
+        parent_dir, name = os.path.split(path)
+        # Do not use mkstemp because it creates with permissions 0o600
+        with TemporaryDirectory(
+                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
+            with open(os.path.join(tmp_dir, name), mode, newline=newline,
+                      encoding=None if "b" in mode else self._encoding) as tmp:
+                yield tmp
+                tmp.flush()
+                self._storage._fsync(tmp)
+            os.replace(os.path.join(tmp_dir, name), path)
+        self._storage._sync_directory(parent_dir)
+
+
+class StorageBase(storage.BaseStorage):
+
+    _collection_class: Type["multifilesystem.Collection"]
+    _filesystem_folder: str
+    _filesystem_fsync: bool
+
+    def __init__(self, configuration: config.Configuration) -> None:
+        super().__init__(configuration)
+        self._filesystem_folder = configuration.get(
+            "storage", "filesystem_folder")
+        self._filesystem_fsync = configuration.get(
+            "storage", "_filesystem_fsync")
+
+    def _get_collection_root_folder(self) -> str:
+        return os.path.join(self._filesystem_folder, "collection-root")
+
+    def _fsync(self, f: IO[AnyStr]) -> None:
+        if self._filesystem_fsync:
+            try:
+                pathutils.fsync(f.fileno())
+            except OSError as e:
+                raise RuntimeError("Fsync'ing file %r failed: %s" %
+                                   (f.name, e)) from e
+
+    def _sync_directory(self, path: str) -> None:
+        """Sync directory to disk.
+
+        This only works on POSIX and does nothing on other systems.
+
+        """
+        if not self._filesystem_fsync:
+            return
+        if os.name == "posix":
+            try:
+                fd = os.open(path, 0)
+                try:
+                    pathutils.fsync(fd)
+                finally:
+                    os.close(fd)
+            except OSError as e:
+                raise RuntimeError("Fsync'ing directory %r failed: %s" %
+                                   (path, e)) from e
+
+    def _makedirs_synced(self, filesystem_path: str) -> None:
+        """Recursively create a directory and its parents in a sync'ed way.
+
+        This method acts silently when the folder already exists.
+
+        """
+        if os.path.isdir(filesystem_path):
+            return
+        parent_filesystem_path = os.path.dirname(filesystem_path)
+        # Prevent infinite loop
+        if filesystem_path != parent_filesystem_path:
+            # Create parent dirs recursively
+            self._makedirs_synced(parent_filesystem_path)
+        # Possible race!
+        os.makedirs(filesystem_path, exist_ok=True)
+        self._sync_directory(parent_filesystem_path)

+ 34 - 21
radicale/storage/multifilesystem/cache.py

@@ -21,16 +21,27 @@ import os
 import pickle
 import time
 from hashlib import sha256
+from typing import BinaryIO, Iterable, NamedTuple, Optional, cast
 
+import radicale.item as radicale_item
 from radicale import pathutils, storage
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase
 
+CacheContent = NamedTuple("CacheContent", [
+    ("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
+    ("start", int), ("end", int)])
 
-class CollectionCacheMixin:
-    def _clean_cache(self, folder, names, max_age=None):
+
+class CollectionPartCache(CollectionBase):
+
+    def _clean_cache(self, folder: str, names: Iterable[str],
+                     max_age: int = 0) -> None:
         """Delete all ``names`` in ``folder`` that are older than ``max_age``.
         """
-        age_limit = time.time() - max_age if max_age is not None else None
+        age_limit: Optional[float] = None
+        if max_age is not None and max_age > 0:
+            age_limit = time.time() - max_age
         modified = False
         for name in names:
             if not pathutils.is_safe_filesystem_path_component(name):
@@ -55,47 +66,49 @@ class CollectionCacheMixin:
             self._storage._sync_directory(folder)
 
     @staticmethod
-    def _item_cache_hash(raw_text):
+    def _item_cache_hash(raw_text: bytes) -> str:
         _hash = sha256()
         _hash.update(storage.CACHE_VERSION)
         _hash.update(raw_text)
         return _hash.hexdigest()
 
-    def _item_cache_content(self, item, cache_hash=None):
-        text = item.serialize()
-        if cache_hash is None:
-            cache_hash = self._item_cache_hash(text.encode(self._encoding))
-        return (cache_hash, item.uid, item.etag, text, item.name,
-                item.component_name, *item.time_range)
+    def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
+        return CacheContent(item.uid, item.etag, item.serialize(), item.name,
+                            item.component_name, *item.time_range)
 
-    def _store_item_cache(self, href, item, cache_hash=None):
+    def _store_item_cache(self, href: str, item: radicale_item.Item,
+                          cache_hash: str = "") -> CacheContent:
+        if not cache_hash:
+            cache_hash = self._item_cache_hash(
+                item.serialize().encode(self._encoding))
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        content = self._item_cache_content(item, cache_hash)
+        content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
         with contextlib.suppress(PermissionError), self._atomic_write(
-                os.path.join(cache_folder, href), "wb") as f:
-            pickle.dump(content, f)
+                os.path.join(cache_folder, href), "wb") as fo:
+            fb = cast(BinaryIO, fo)
+            pickle.dump((cache_hash, *content), fb)
         return content
 
-    def _load_item_cache(self, href, input_hash):
+    def _load_item_cache(self, href: str, cache_hash: str
+                         ) -> Optional[CacheContent]:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        cache_hash = uid = etag = text = name = tag = start = end = None
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
-                cache_hash, *content = pickle.load(f)
-                if cache_hash == input_hash:
-                    uid, etag, text, name, tag, start, end = content
+                hash_, *remainder = pickle.load(f)
+                if hash_ and hash_ == cache_hash:
+                    return CacheContent(*remainder)
         except FileNotFoundError:
             pass
         except (pickle.UnpicklingError, ValueError) as e:
             logger.warning("Failed to load item cache entry %r in %r: %s",
                            href, self.path, e, exc_info=True)
-        return cache_hash, uid, etag, text, name, tag, start, end
+        return None
 
-    def _clean_item_cache(self):
+    def _clean_item_cache(self) -> None:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
         self._clean_cache(cache_folder, (

+ 16 - 7
radicale/storage/multifilesystem/create_collection.py

@@ -18,13 +18,19 @@
 
 import os
 from tempfile import TemporaryDirectory
+from typing import Iterable, Optional, cast
 
+import radicale.item as radicale_item
 from radicale import pathutils
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageCreateCollectionMixin:
+class StoragePartCreateCollection(StorageBase):
 
-    def create_collection(self, href, items=None, props=None):
+    def create_collection(self, href: str,
+                          items: Optional[Iterable[radicale_item.Item]] = None,
+                          props=None) -> "multifilesystem.Collection":
         folder = self._get_collection_root_folder()
 
         # Path should already be sanitized
@@ -34,19 +40,21 @@ class StorageCreateCollectionMixin:
         if not props:
             self._makedirs_synced(filesystem_path)
             return self._collection_class(
-                self, pathutils.unstrip_path(sane_path, True))
+                cast(multifilesystem.Storage, self),
+                pathutils.unstrip_path(sane_path, True))
 
         parent_dir = os.path.dirname(filesystem_path)
         self._makedirs_synced(parent_dir)
 
         # Create a temporary directory with an unsafe name
-        with TemporaryDirectory(
-                prefix=".Radicale.tmp-", dir=parent_dir) as tmp_dir:
+        with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
+                                ) as tmp_dir:
             # The temporary directory itself can't be renamed
             tmp_filesystem_path = os.path.join(tmp_dir, "collection")
             os.makedirs(tmp_filesystem_path)
             col = self._collection_class(
-                self, pathutils.unstrip_path(sane_path, True),
+                cast(multifilesystem.Storage, self),
+                pathutils.unstrip_path(sane_path, True),
                 filesystem_path=tmp_filesystem_path)
             col.set_meta(props)
             if items is not None:
@@ -62,4 +70,5 @@ class StorageCreateCollectionMixin:
             self._sync_directory(parent_dir)
 
         return self._collection_class(
-            self, pathutils.unstrip_path(sane_path, True))
+            cast(multifilesystem.Storage, self),
+            pathutils.unstrip_path(sane_path, True))

+ 8 - 4
radicale/storage/multifilesystem/delete.py

@@ -18,20 +18,24 @@
 
 import os
 from tempfile import TemporaryDirectory
+from typing import Optional
 
 from radicale import pathutils, storage
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.history import CollectionPartHistory
 
 
-class CollectionDeleteMixin:
-    def delete(self, href=None):
+class CollectionPartDelete(CollectionPartHistory, CollectionBase):
+
+    def delete(self, href: Optional[str] = None) -> None:
         if href is None:
             # Delete the collection
             parent_dir = os.path.dirname(self._filesystem_path)
             try:
                 os.rmdir(self._filesystem_path)
             except OSError:
-                with TemporaryDirectory(
-                        prefix=".Radicale.tmp-", dir=parent_dir) as tmp:
+                with TemporaryDirectory(prefix=".Radicale.tmp-", dir=parent_dir
+                                        ) as tmp:
                     os.rename(self._filesystem_path, os.path.join(
                         tmp, os.path.basename(self._filesystem_path)))
                     self._storage._sync_directory(parent_dir)

+ 30 - 10
radicale/storage/multifilesystem/discover.py

@@ -16,18 +16,31 @@
 # You should have received a copy of the GNU General Public License
 # along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
 
-import contextlib
 import os
 import posixpath
+from typing import Callable, ContextManager, Iterator, Optional, cast
 
-from radicale import pathutils
+from radicale import pathutils, types
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageDiscoverMixin:
+@types.contextmanager
+def _null_child_context_manager(path: str,
+                                href: Optional[str]) -> Iterator[None]:
+    yield
 
-    def discover(self, path, depth="0", child_context_manager=(
-            lambda path, href=None: contextlib.ExitStack())):
+
+class StoragePartDiscover(StorageBase):
+
+    def discover(
+            self, path: str, depth: str = "0", child_context_manager: Optional[
+                Callable[[str, Optional[str]], ContextManager[None]]] = None
+            ) -> Iterator[types.CollectionOrItem]:
+        # assert isinstance(self, multifilesystem.Storage)
+        if child_context_manager is None:
+            child_context_manager = _null_child_context_manager
         # Path should already be sanitized
         sane_path = pathutils.strip_path(path)
         attributes = sane_path.split("/") if sane_path else []
@@ -44,6 +57,7 @@ class StorageDiscoverMixin:
             return
 
         # Check if the path exists and if it leads to a collection or an item
+        href: Optional[str]
         if not os.path.isdir(filesystem_path):
             if attributes and os.path.isfile(filesystem_path):
                 href = attributes.pop()
@@ -54,10 +68,13 @@ class StorageDiscoverMixin:
 
         sane_path = "/".join(attributes)
         collection = self._collection_class(
-            self, pathutils.unstrip_path(sane_path, True))
+            cast(multifilesystem.Storage, self),
+            pathutils.unstrip_path(sane_path, True))
 
         if href:
-            yield collection._get(href)
+            item = collection._get(href)
+            if item is not None:
+                yield item
             return
 
         yield collection
@@ -67,7 +84,9 @@ class StorageDiscoverMixin:
 
         for href in collection._list():
             with child_context_manager(sane_path, href):
-                yield collection._get(href)
+                item = collection._get(href)
+                if item is not None:
+                    yield item
 
         for entry in os.scandir(filesystem_path):
             if not entry.is_dir():
@@ -80,5 +99,6 @@ class StorageDiscoverMixin:
                 continue
             sane_child_path = posixpath.join(sane_path, href)
             child_path = pathutils.unstrip_path(sane_child_path, True)
-            with child_context_manager(sane_child_path):
-                yield self._collection_class(self, child_path)
+            with child_context_manager(sane_child_path, None):
+                yield self._collection_class(
+                    cast(multifilesystem.Storage, self), child_path)

+ 41 - 27
radicale/storage/multifilesystem/get.py

@@ -19,20 +19,30 @@
 import os
 import sys
 import time
+from typing import Iterable, Iterator, Optional, Tuple
 
 import vobject
 
 import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.lock import CollectionPartLock
 
 
-class CollectionGetMixin:
-    def __init__(self):
-        super().__init__()
+class CollectionPartGet(CollectionPartCache, CollectionPartLock,
+                        CollectionBase):
+
+    _item_cache_cleaned: bool
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._item_cache_cleaned = False
 
-    def _list(self):
+    def _list(self) -> Iterator[str]:
         for entry in os.scandir(self._filesystem_path):
             if not entry.is_file():
                 continue
@@ -43,13 +53,14 @@ class CollectionGetMixin:
                 continue
             yield href
 
-    def _get(self, href, verify_href=True):
+    def _get(self, href: str, verify_href: bool = True
+             ) -> Optional[radicale_item.Item]:
         if verify_href:
             try:
                 if not pathutils.is_safe_filesystem_path_component(href):
                     raise pathutils.UnsafePathError(href)
-                path = pathutils.path_to_filesystem(
-                    self._filesystem_path, href)
+                path = pathutils.path_to_filesystem(self._filesystem_path,
+                                                    href)
             except ValueError as e:
                 logger.debug(
                     "Can't translate name %r safely to filesystem in %r: %s",
@@ -70,19 +81,17 @@ class CollectionGetMixin:
             raise
         # The hash of the component in the file system. This is used to check,
         # if the entry in the cache is still valid.
-        input_hash = self._item_cache_hash(raw_text)
-        cache_hash, uid, etag, text, name, tag, start, end = \
-            self._load_item_cache(href, input_hash)
-        if input_hash != cache_hash:
+        cache_hash = self._item_cache_hash(raw_text)
+        cache_content = self._load_item_cache(href, cache_hash)
+        if cache_content is None:
             with self._acquire_cache_lock("item"):
                 # Lock the item cache to prevent multpile processes from
                 # generating the same data in parallel.
                 # This improves the performance for multiple requests.
                 if self._storage._lock.locked == "r":
                     # Check if another process created the file in the meantime
-                    cache_hash, uid, etag, text, name, tag, start, end = \
-                        self._load_item_cache(href, input_hash)
-                if input_hash != cache_hash:
+                    cache_content = self._load_item_cache(href, cache_hash)
+                if cache_content is None:
                     try:
                         vobject_items = list(vobject.readComponents(
                             raw_text.decode(self._encoding)))
@@ -91,9 +100,8 @@ class CollectionGetMixin:
                         vobject_item, = vobject_items
                         temp_item = radicale_item.Item(
                             collection=self, vobject_item=vobject_item)
-                        cache_hash, uid, etag, text, name, tag, start, end = \
-                            self._store_item_cache(
-                                href, temp_item, input_hash)
+                        cache_content = self._store_item_cache(
+                            href, temp_item, cache_hash)
                     except Exception as e:
                         raise RuntimeError("Failed to load item %r in %r: %s" %
                                            (href, self.path, e)) from e
@@ -108,11 +116,14 @@ class CollectionGetMixin:
         # Don't keep reference to ``vobject_item``, because it requires a lot
         # of memory.
         return radicale_item.Item(
-            collection=self, href=href, last_modified=last_modified, etag=etag,
-            text=text, uid=uid, name=name, component_name=tag,
-            time_range=(start, end))
+            collection=self, href=href, last_modified=last_modified,
+            etag=cache_content.etag, text=cache_content.text,
+            uid=cache_content.uid, name=cache_content.name,
+            component_name=cache_content.tag,
+            time_range=(cache_content.start, cache_content.end))
 
-    def get_multi(self, hrefs):
+    def get_multi(self, hrefs: Iterable[str]
+                  ) -> Iterator[Tuple[str, Optional[radicale_item.Item]]]:
         # It's faster to check for file name collissions here, because
         # we only need to call os.listdir once.
         files = None
@@ -124,13 +135,16 @@ class CollectionGetMixin:
             path = os.path.join(self._filesystem_path, href)
             if (not pathutils.is_safe_filesystem_path_component(href) or
                     href not in files and os.path.lexists(path)):
-                logger.debug(
-                    "Can't translate name safely to filesystem: %r", href)
+                logger.debug("Can't translate name safely to filesystem: %r",
+                             href)
                 yield (href, None)
             else:
                 yield (href, self._get(href, verify_href=False))
 
-    def get_all(self):
-        # We don't need to check for collissions, because the the file names
-        # are from os.listdir.
-        return (self._get(href, verify_href=False) for href in self._list())
+    def get_all(self) -> Iterator[radicale_item.Item]:
+        for href in self._list():
+            # We don't need to check for collisions, because the file names
+            # are from os.listdir.
+            item = self._get(href, verify_href=False)
+            if item is not None:
+                yield item

+ 17 - 5
radicale/storage/multifilesystem/history.py

@@ -20,13 +20,25 @@ import binascii
 import contextlib
 import os
 import pickle
+from typing import BinaryIO, Optional, cast
 
 import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.log import logger
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
 
 
-class CollectionHistoryMixin:
+class CollectionPartHistory(CollectionBase):
+
+    _max_sync_token_age: int
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
+        self._max_sync_token_age = storage_.configuration.get(
+            "storage", "max_sync_token_age")
+
     def _update_history_etag(self, href, item):
         """Updates and retrieves the history etag from the history cache.
 
@@ -56,8 +68,9 @@ class CollectionHistoryMixin:
                 history_etag + "/" + etag).strip("\"")
             # Race: Other processes might have created and locked the file.
             with contextlib.suppress(PermissionError), self._atomic_write(
-                    os.path.join(history_folder, href), "wb") as f:
-                pickle.dump([etag, history_etag], f)
+                    os.path.join(history_folder, href), "wb") as fo:
+                fb = cast(BinaryIO, fo)
+                pickle.dump([etag, history_etag], fb)
         return history_etag
 
     def _get_deleted_history_hrefs(self):
@@ -79,5 +92,4 @@ class CollectionHistoryMixin:
         history_folder = os.path.join(self._filesystem_path,
                                       ".Radicale.cache", "history")
         self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
-                          max_age=self._storage.configuration.get(
-                              "storage", "max_sync_token_age"))
+                          max_age=self._max_sync_token_age)

+ 33 - 24
radicale/storage/multifilesystem/lock.py

@@ -23,56 +23,65 @@ import shlex
 import signal
 import subprocess
 import sys
+from typing import Iterator
 
-from radicale import pathutils
+from radicale import config, pathutils, types
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase, StorageBase
 
 
-class CollectionLockMixin:
-    def _acquire_cache_lock(self, ns=""):
+class CollectionPartLock(CollectionBase):
+
+    @types.contextmanager
+    def _acquire_cache_lock(self, ns: str = "") -> Iterator[None]:
         if self._storage._lock.locked == "w":
-            return contextlib.ExitStack()
+            yield
+            return
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache")
         self._storage._makedirs_synced(cache_folder)
         lock_path = os.path.join(cache_folder,
                                  ".Radicale.lock" + (".%s" % ns if ns else ""))
         lock = pathutils.RwLock(lock_path)
-        return lock.acquire("w")
+        with lock.acquire("w"):
+            yield
+
 
+class StoragePartLock(StorageBase):
 
-class StorageLockMixin:
+    _lock: pathutils.RwLock
+    _hook: str
 
-    def __init__(self, configuration):
+    def __init__(self, configuration: config.Configuration) -> None:
         super().__init__(configuration)
-        folder = self.configuration.get("storage", "filesystem_folder")
-        lock_path = os.path.join(folder, ".Radicale.lock")
+        lock_path = os.path.join(self._filesystem_folder, ".Radicale.lock")
         self._lock = pathutils.RwLock(lock_path)
+        self._hook = configuration.get("storage", "hook")
 
-    @contextlib.contextmanager
-    def acquire_lock(self, mode, user=""):
+    @types.contextmanager
+    def acquire_lock(self, mode: str, user: str = "") -> Iterator[None]:
         with self._lock.acquire(mode):
             yield
             # execute hook
-            hook = self.configuration.get("storage", "hook")
-            if mode == "w" and hook:
-                folder = self.configuration.get("storage", "filesystem_folder")
+            if mode == "w" and self._hook:
                 debug = logger.isEnabledFor(logging.DEBUG)
-                popen_kwargs = dict(
-                    stdin=subprocess.DEVNULL,
-                    stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
-                    stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
-                    shell=True, universal_newlines=True, cwd=folder)
                 # Use new process group for child to prevent terminals
                 # from sending SIGINT etc.
+                preexec_fn = None
+                creationflags = 0
                 if os.name == "posix":
                     # Process group is also used to identify child processes
-                    popen_kwargs["preexec_fn"] = os.setpgrp
+                    preexec_fn = os.setpgrp
                 elif sys.platform == "win32":
-                    popen_kwargs["creationflags"] = (
-                        subprocess.CREATE_NEW_PROCESS_GROUP)
-                command = hook % {"user": shlex.quote(user or "Anonymous")}
+                    creationflags |= subprocess.CREATE_NEW_PROCESS_GROUP
+                command = self._hook % {
+                    "user": shlex.quote(user or "Anonymous")}
                 logger.debug("Running storage hook")
-                p = subprocess.Popen(command, **popen_kwargs)
+                p = subprocess.Popen(
+                    command, stdin=subprocess.DEVNULL,
+                    stdout=subprocess.PIPE if debug else subprocess.DEVNULL,
+                    stderr=subprocess.PIPE if debug else subprocess.DEVNULL,
+                    shell=True, universal_newlines=True, preexec_fn=preexec_fn,
+                    cwd=self._filesystem_folder, creationflags=creationflags)
                 try:
                     stdout_data, stderr_data = p.communicate()
                 except BaseException:  # e.g. KeyboardInterrupt or SystemExit

+ 22 - 6
radicale/storage/multifilesystem/meta.py

@@ -18,18 +18,33 @@
 
 import json
 import os
+from typing import Mapping, Optional, TextIO, Union, cast, overload
 
 import radicale.item as radicale_item
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import CollectionBase
 
 
-class CollectionMetaMixin:
-    def __init__(self):
-        super().__init__()
+class CollectionPartMeta(CollectionBase):
+
+    _meta_cache: Optional[Mapping[str, str]]
+    _props_path: str
+
+    def __init__(self, storage_: "multifilesystem.Storage", path: str,
+                 filesystem_path: Optional[str] = None) -> None:
+        super().__init__(storage_, path, filesystem_path)
         self._meta_cache = None
         self._props_path = os.path.join(
             self._filesystem_path, ".Radicale.props")
 
-    def get_meta(self, key=None):
+    @overload
+    def get_meta(self, key: None = None) -> Mapping[str, str]: ...
+
+    @overload
+    def get_meta(self, key: str) -> Optional[str]: ...
+
+    def get_meta(self, key: Optional[str] = None) -> Union[Mapping[str, str],
+                                                           Optional[str]]:
         # reuse cached value if the storage is read-only
         if self._storage._lock.locked == "w" or self._meta_cache is None:
             try:
@@ -45,6 +60,7 @@ class CollectionMetaMixin:
                                    "%r: %s" % (self.path, e)) from e
         return self._meta_cache if key is None else self._meta_cache.get(key)
 
-    def set_meta(self, props):
-        with self._atomic_write(self._props_path, "w") as f:
+    def set_meta(self, props: Mapping[str, str]) -> None:
+        with self._atomic_write(self._props_path, "w") as fo:
+            f = cast(TextIO, fo)
             json.dump(props, f, sort_keys=True)

+ 14 - 8
radicale/storage/multifilesystem/move.py

@@ -18,19 +18,25 @@
 
 import os
 
-from radicale import pathutils
+from radicale import item as radicale_item
+from radicale import pathutils, storage
+from radicale.storage import multifilesystem
+from radicale.storage.multifilesystem.base import StorageBase
 
 
-class StorageMoveMixin:
+class StoragePartMove(StorageBase):
 
-    def move(self, item, to_collection, to_href):
+    def move(self, item: radicale_item.Item,
+             to_collection: storage.BaseCollection, to_href: str) -> None:
         if not pathutils.is_safe_filesystem_path_component(to_href):
             raise pathutils.UnsafePathError(to_href)
-        os.replace(
-            pathutils.path_to_filesystem(
-                item.collection._filesystem_path, item.href),
-            pathutils.path_to_filesystem(
-                to_collection._filesystem_path, to_href))
+        assert isinstance(to_collection, multifilesystem.Collection)
+        assert isinstance(item.collection, multifilesystem.Collection)
+        assert item.href
+        os.replace(pathutils.path_to_filesystem(
+                       item.collection._filesystem_path, item.href),
+                   pathutils.path_to_filesystem(
+                       to_collection._filesystem_path, to_href))
         self._sync_directory(to_collection._filesystem_path)
         if item.collection._filesystem_path != to_collection._filesystem_path:
             self._sync_directory(item.collection._filesystem_path)

+ 13 - 7
radicale/storage/multifilesystem/sync.py

@@ -21,16 +21,22 @@ import itertools
 import os
 import pickle
 from hashlib import sha256
+from typing import BinaryIO, Iterable, Tuple, cast
 
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.history import CollectionPartHistory
 
 
-class CollectionSyncMixin:
-    def sync(self, old_token=""):
+class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
+                         CollectionBase):
+
+    def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
         # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
         # where TOKEN_NAME is the sha256 hash of all history etags of present
         # and past items of the collection.
-        def check_token_name(token_name):
+        def check_token_name(token_name: str) -> bool:
             if len(token_name) != 64:
                 return False
             for c in token_name:
@@ -89,15 +95,15 @@ class CollectionSyncMixin:
             self._storage._makedirs_synced(token_folder)
             try:
                 # Race: Other processes might have created and locked the file.
-                with self._atomic_write(token_path, "wb") as f:
-                    pickle.dump(state, f)
+                with self._atomic_write(token_path, "wb") as fo:
+                    fb = cast(BinaryIO, fo)
+                    pickle.dump(state, fb)
             except PermissionError:
                 pass
             else:
                 # clean up old sync tokens and item cache
                 self._clean_cache(token_folder, os.listdir(token_folder),
-                                  max_age=self._storage.configuration.get(
-                                      "storage", "max_sync_token_age"))
+                                  max_age=self._max_sync_token_age)
                 self._clean_history()
         else:
             # Try to update the modification time

+ 25 - 11
radicale/storage/multifilesystem/upload.py

@@ -19,13 +19,21 @@
 import os
 import pickle
 import sys
+from typing import Iterable, Set, TextIO, cast
 
 import radicale.item as radicale_item
 from radicale import pathutils
+from radicale.storage.multifilesystem.base import CollectionBase
+from radicale.storage.multifilesystem.cache import CollectionPartCache
+from radicale.storage.multifilesystem.get import CollectionPartGet
+from radicale.storage.multifilesystem.history import CollectionPartHistory
 
 
-class CollectionUploadMixin:
-    def upload(self, href, item):
+class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
+                           CollectionPartHistory, CollectionBase):
+
+    def upload(self, href: str, item: radicale_item.Item
+               ) -> radicale_item.Item:
         if not pathutils.is_safe_filesystem_path_component(href):
             raise pathutils.UnsafePathError(href)
         try:
@@ -34,17 +42,22 @@ class CollectionUploadMixin:
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
         path = pathutils.path_to_filesystem(self._filesystem_path, href)
-        with self._atomic_write(path, newline="") as fd:
-            fd.write(item.serialize())
+        with self._atomic_write(path, newline="") as fo:
+            f = cast(TextIO, fo)
+            f.write(item.serialize())
         # Clean the cache after the actual item is stored, or the cache entry
         # will be removed again.
         self._clean_item_cache()
         # Track the change
         self._update_history_etag(href, item)
         self._clean_history()
-        return self._get(href, verify_href=False)
+        uploaded_item = self._get(href, verify_href=False)
+        if uploaded_item is None:
+            raise RuntimeError("Storage modified externally")
+        return uploaded_item
 
-    def _upload_all_nonatomic(self, items, suffix=""):
+    def _upload_all_nonatomic(self, items: Iterable[radicale_item.Item],
+                              suffix: str = "") -> None:
         """Upload a new set of items.
 
         This takes a list of vobject items and
@@ -54,7 +67,7 @@ class CollectionUploadMixin:
         cache_folder = os.path.join(self._filesystem_path,
                                     ".Radicale.cache", "item")
         self._storage._makedirs_synced(cache_folder)
-        hrefs = set()
+        hrefs: Set[str] = set()
         for item in items:
             uid = item.uid
             try:
@@ -92,14 +105,15 @@ class CollectionUploadMixin:
                             sys.platform == "win32" and e.errno == 123):
                         continue
                     raise
+            assert href is not None and f is not None
             with f:
                 f.write(item.serialize())
                 f.flush()
                 self._storage._fsync(f)
             hrefs.add(href)
-            with open(os.path.join(cache_folder, href), "wb") as f:
-                pickle.dump(cache_content, f)
-                f.flush()
-                self._storage._fsync(f)
+            with open(os.path.join(cache_folder, href), "wb") as fb:
+                pickle.dump(cache_content, fb)
+                fb.flush()
+                self._storage._fsync(fb)
         self._storage._sync_directory(cache_folder)
         self._storage._sync_directory(self._filesystem_path)

+ 16 - 10
radicale/storage/multifilesystem/verify.py

@@ -16,23 +16,27 @@
 # You should have received a copy of the GNU General Public License
 # along with Radicale.  If not, see <http://www.gnu.org/licenses/>.
 
-import contextlib
+from typing import Iterator, Optional, Set
 
-from radicale import pathutils, storage
+from radicale import pathutils, storage, types
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import StorageBase
+from radicale.storage.multifilesystem.discover import StoragePartDiscover
 
 
-class StorageVerifyMixin:
-    def verify(self):
+class StoragePartVerify(StoragePartDiscover, StorageBase):
+
+    def verify(self) -> bool:
         item_errors = collection_errors = 0
 
-        @contextlib.contextmanager
-        def exception_cm(sane_path, href=None):
+        @types.contextmanager
+        def exception_cm(sane_path: str, href: Optional[str]
+                         ) -> Iterator[None]:
             nonlocal item_errors, collection_errors
             try:
                 yield
             except Exception as e:
-                if href:
+                if href is not None:
                     item_errors += 1
                     name = "item %r in %r" % (href, sane_path)
                 else:
@@ -45,13 +49,14 @@ class StorageVerifyMixin:
             sane_path = remaining_sane_paths.pop(0)
             path = pathutils.unstrip_path(sane_path, True)
             logger.debug("Verifying collection %r", sane_path)
-            with exception_cm(sane_path):
+            with exception_cm(sane_path, None):
                 saved_item_errors = item_errors
-                collection = None
-                uids = set()
+                collection: Optional[storage.BaseCollection] = None
+                uids: Set[str] = set()
                 has_child_collections = False
                 for item in self.discover(path, "1", exception_cm):
                     if not collection:
+                        assert isinstance(item, storage.BaseCollection)
                         collection = item
                         collection.get_meta()
                         continue
@@ -65,6 +70,7 @@ class StorageVerifyMixin:
                         uids.add(item.uid)
                         logger.debug("Verified item %r in %r",
                                      item.href, sane_path)
+                assert collection
                 if item_errors == saved_item_errors:
                     collection.sync()
                 if has_child_collections and collection.tag: