# sync.py
# This file is part of Radicale - CalDAV and CardDAV server
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  18. import contextlib
  19. import itertools
  20. import os
  21. import pickle
  22. from hashlib import sha256
  23. from typing import BinaryIO, Iterable, Tuple, cast
  24. from radicale.log import logger
  25. from radicale.storage.multifilesystem.base import CollectionBase
  26. from radicale.storage.multifilesystem.cache import CollectionPartCache
  27. from radicale.storage.multifilesystem.history import CollectionPartHistory
  28. class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
  29. CollectionBase):
  30. def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
  31. # The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
  32. # where TOKEN_NAME is the sha256 hash of all history etags of present
  33. # and past items of the collection.
  34. def check_token_name(token_name: str) -> bool:
  35. if len(token_name) != 64:
  36. return False
  37. for c in token_name:
  38. if c not in "0123456789abcdef":
  39. return False
  40. return True
  41. old_token_name = ""
  42. if old_token:
  43. # Extract the token name from the sync token
  44. if not old_token.startswith("http://radicale.org/ns/sync/"):
  45. raise ValueError("Malformed token: %r" % old_token)
  46. old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
  47. if not check_token_name(old_token_name):
  48. raise ValueError("Malformed token: %r" % old_token)
  49. # Get the current state and sync-token of the collection.
  50. state = {}
  51. token_name_hash = sha256()
  52. # Find the history of all existing and deleted items
  53. for href, item in itertools.chain(
  54. ((item.href, item) for item in self.get_all()),
  55. ((href, None) for href in self._get_deleted_history_hrefs())):
  56. history_etag = self._update_history_etag(href, item)
  57. state[href] = history_etag
  58. token_name_hash.update((href + "/" + history_etag).encode())
  59. token_name = token_name_hash.hexdigest()
  60. token = "http://radicale.org/ns/sync/%s" % token_name
  61. if token_name == old_token_name:
  62. # Nothing changed
  63. return token, ()
  64. token_folder = os.path.join(self._filesystem_path,
  65. ".Radicale.cache", "sync-token")
  66. token_path = os.path.join(token_folder, token_name)
  67. old_state = {}
  68. if old_token_name:
  69. # load the old token state
  70. old_token_path = os.path.join(token_folder, old_token_name)
  71. try:
  72. # Race: Another process might have deleted the file.
  73. with open(old_token_path, "rb") as f:
  74. old_state = pickle.load(f)
  75. except (FileNotFoundError, pickle.UnpicklingError,
  76. ValueError) as e:
  77. if isinstance(e, (pickle.UnpicklingError, ValueError)):
  78. logger.warning(
  79. "Failed to load stored sync token %r in %r: %s",
  80. old_token_name, self.path, e, exc_info=True)
  81. # Delete the damaged file
  82. with contextlib.suppress(FileNotFoundError,
  83. PermissionError):
  84. os.remove(old_token_path)
  85. raise ValueError("Token not found: %r" % old_token)
  86. # write the new token state or update the modification time of
  87. # existing token state
  88. if not os.path.exists(token_path):
  89. self._storage._makedirs_synced(token_folder)
  90. try:
  91. # Race: Other processes might have created and locked the file.
  92. with self._atomic_write(token_path, "wb") as fo: # type: ignore # for now, TODO fix for "mypy"
  93. fb = cast(BinaryIO, fo)
  94. pickle.dump(state, fb)
  95. except PermissionError:
  96. pass
  97. else:
  98. # clean up old sync tokens and item cache
  99. self._clean_cache(token_folder, os.listdir(token_folder),
  100. max_age=self._max_sync_token_age)
  101. self._clean_history()
  102. else:
  103. # Try to update the modification time
  104. with contextlib.suppress(FileNotFoundError):
  105. # Race: Another process might have deleted the file.
  106. os.utime(token_path)
  107. changes = []
  108. # Find all new, changed and deleted (that are still in the item cache)
  109. # items
  110. for href, history_etag in state.items():
  111. if history_etag != old_state.get(href):
  112. changes.append(href)
  113. # Find all deleted items that are no longer in the item cache
  114. for href, history_etag in old_state.items():
  115. if href not in state:
  116. changes.append(href)
  117. return token, changes