report.py 34 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807
  1. # This file is part of Radicale - CalDAV and CardDAV server
  2. # Copyright © 2008 Nicolas Kandel
  3. # Copyright © 2008 Pascal Halter
  4. # Copyright © 2008-2017 Guillaume Ayoub
  5. # Copyright © 2017-2021 Unrud <unrud@outlook.com>
  6. # Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
  7. # Copyright © 2024-2024 Ray <ray@react0r.com>
  8. # Copyright © 2024-2025 Georgiy <metallerok@gmail.com>
  9. # Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
  10. # Copyright © 2025-2025 David Greaves <david@dgreaves.com>
  11. #
  12. # This library is free software: you can redistribute it and/or modify
  13. # it under the terms of the GNU General Public License as published by
  14. # the Free Software Foundation, either version 3 of the License, or
  15. # (at your option) any later version.
  16. #
  17. # This library is distributed in the hope that it will be useful,
  18. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  19. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20. # GNU General Public License for more details.
  21. #
  22. # You should have received a copy of the GNU General Public License
  23. # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  24. import contextlib
  25. import copy
  26. import datetime
  27. import posixpath
  28. import socket
  29. import xml.etree.ElementTree as ET
  30. from http import client
  31. from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
  32. Tuple, Union)
  33. from urllib.parse import unquote, urlparse
  34. import vobject
  35. import vobject.base
  36. from vobject.base import ContentLine
  37. import radicale.item as radicale_item
  38. from radicale import httputils, pathutils, storage, types, xmlutils
  39. from radicale.app.base import Access, ApplicationBase
  40. from radicale.item import filter as radicale_filter
  41. from radicale.log import logger
  42. def free_busy_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
  43. collection: storage.BaseCollection, encoding: str,
  44. unlock_storage_fn: Callable[[], None],
  45. max_occurrence: int
  46. ) -> Tuple[int, Union[ET.Element, str]]:
  47. # NOTE: this function returns both an Element and a string because
  48. # free-busy reports are an edge-case on the return type according
  49. # to the spec.
  50. multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
  51. if xml_request is None:
  52. return client.MULTI_STATUS, multistatus
  53. root = xml_request
  54. if (root.tag == xmlutils.make_clark("C:free-busy-query") and
  55. collection.tag != "VCALENDAR"):
  56. logger.warning("Invalid REPORT method %r on %r requested",
  57. xmlutils.make_human_tag(root.tag), path)
  58. return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
  59. time_range_element = root.find(xmlutils.make_clark("C:time-range"))
  60. assert isinstance(time_range_element, ET.Element)
  61. # Build a single filter from the free busy query for retrieval
  62. # TODO: filter for VFREEBUSY in additional to VEVENT but
  63. # test_filter doesn't support that yet.
  64. vevent_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  65. attrib={'name': 'VEVENT'})
  66. vevent_cf_element.append(time_range_element)
  67. vcalendar_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  68. attrib={'name': 'VCALENDAR'})
  69. vcalendar_cf_element.append(vevent_cf_element)
  70. filter_element = ET.Element(xmlutils.make_clark("C:filter"))
  71. filter_element.append(vcalendar_cf_element)
  72. filters = (filter_element,)
  73. # First pull from storage
  74. retrieved_items = list(collection.get_filtered(filters))
  75. # !!! Don't access storage after this !!!
  76. unlock_storage_fn()
  77. cal = vobject.iCalendar()
  78. collection_tag = collection.tag
  79. while retrieved_items:
  80. # Second filtering before evaluating occurrences.
  81. # ``item.vobject_item`` might be accessed during filtering.
  82. # Don't keep reference to ``item``, because VObject requires a lot of
  83. # memory.
  84. item, filter_matched = retrieved_items.pop(0)
  85. if not filter_matched:
  86. try:
  87. if not test_filter(collection_tag, item, filter_element):
  88. continue
  89. except ValueError as e:
  90. raise ValueError("Failed to free-busy filter item %r from %r: %s" %
  91. (item.href, collection.path, e)) from e
  92. except Exception as e:
  93. raise RuntimeError("Failed to free-busy filter item %r from %r: %s" %
  94. (item.href, collection.path, e)) from e
  95. fbtype = None
  96. if item.component_name == 'VEVENT':
  97. transp = getattr(item.vobject_item.vevent, 'transp', None)
  98. if transp and transp.value != 'OPAQUE':
  99. continue
  100. status = getattr(item.vobject_item.vevent, 'status', None)
  101. if not status or status.value == 'CONFIRMED':
  102. fbtype = 'BUSY'
  103. elif status.value == 'CANCELLED':
  104. fbtype = 'FREE'
  105. elif status.value == 'TENTATIVE':
  106. fbtype = 'BUSY-TENTATIVE'
  107. else:
  108. # Could do fbtype = status.value for x-name, I prefer this
  109. fbtype = 'BUSY'
  110. # TODO: coalesce overlapping periods
  111. if max_occurrence > 0:
  112. n_occurrences = max_occurrence+1
  113. else:
  114. n_occurrences = 0
  115. occurrences = radicale_filter.time_range_fill(item.vobject_item,
  116. time_range_element,
  117. "VEVENT",
  118. n=n_occurrences)
  119. if len(occurrences) >= max_occurrence:
  120. raise ValueError("FREEBUSY occurrences limit of {} hit"
  121. .format(max_occurrence))
  122. for occurrence in occurrences:
  123. vfb = cal.add('vfreebusy')
  124. vfb.add('dtstamp').value = item.vobject_item.vevent.dtstamp.value
  125. vfb.add('dtstart').value, vfb.add('dtend').value = occurrence
  126. if fbtype:
  127. vfb.add('fbtype').value = fbtype
  128. return (client.OK, cal.serialize())
def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
               collection: storage.BaseCollection, encoding: str,
               unlock_storage_fn: Callable[[], None],
               max_occurrence: int = 0,
               ) -> Tuple[int, ET.Element]:
    """Read and answer REPORT requests that return XML.

    Read rfc3253-3.6 for info.

    Handles multiget, sync-collection and query-style reports and builds a
    ``D:multistatus`` response.  ``unlock_storage_fn`` is called once all
    items have been read; storage must not be accessed afterwards.
    ``max_occurrence`` (0 = unlimited) caps the total number of VEVENTs
    serialized into the response as a DoS guard.
    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
                    xmlutils.make_clark("D:principal-property-search"),
                    xmlutils.make_clark("D:expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that don't match the collection type.
    if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
            collection.tag != "VCALENDAR" or
            root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
            collection.tag != "VADDRESSBOOK" or
            root.tag == xmlutils.make_clark("D:sync-collection") and
            collection.tag not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
    props: Union[ET.Element, List]
    if root.find(xmlutils.make_clark("D:prop")) is not None:
        props = root.find(xmlutils.make_clark("D:prop"))  # type: ignore[assignment]
    else:
        props = []

    hreferences: Iterable[str]
    if root.tag in (
            xmlutils.make_clark("C:calendar-multiget"),
            xmlutils.make_clark("CR:addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_clark("D:href")):
            temp_url_path = urlparse(href_element.text).path
            assert isinstance(temp_url_path, str)
            href_path = pathutils.sanitize_path(unquote(temp_url_path))
            if (href_path + "/").startswith(base_prefix + "/"):
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == xmlutils.make_clark("D:sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_clark("D:sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            # client.CONFLICT doesn't work with some clients (e.g. InfCloud)
            return (client.FORBIDDEN,
                    xmlutils.webdav_error("D:valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        hreferences = (path,)
    filters = (
        root.findall(xmlutils.make_clark("C:filter")) +
        root.findall(xmlutils.make_clark("CR:filter")))

    expand = root.find(".//" + xmlutils.make_clark("C:expand"))
    # if we have expand prop we use "filter (except time range) -> expand -> filter (only time range)" approach
    time_range_element = None
    main_filters = []
    for filter_ in filters:
        # extract time-range filter for processing after main filters
        # for expand request
        filter_copy = copy.deepcopy(filter_)
        if expand is not None:
            for comp_filter in filter_copy.findall(".//" + xmlutils.make_clark("C:comp-filter")):
                if comp_filter.get("name", "").upper() == "VCALENDAR":
                    continue
                time_range_element = comp_filter.find(xmlutils.make_clark("C:time-range"))
                if time_range_element is not None:
                    comp_filter.remove(time_range_element)
        main_filters.append(filter_copy)

    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(
        base_prefix, path, collection, hreferences, main_filters, multistatus))
    collection_tag = collection.tag
    # !!! Don't access storage after this !!!
    unlock_storage_fn()

    n_vevents = 0
    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(test_filter(collection_tag, item, filter_)
                           for filter_ in main_filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e

        found_props = []
        not_found_props = []

        for prop in props:
            element = ET.Element(prop.tag)
            if prop.tag == xmlutils.make_clark("D:getetag"):
                element.text = item.etag
                found_props.append(element)
            elif prop.tag == xmlutils.make_clark("D:getcontenttype"):
                element.text = xmlutils.get_content_type(item, encoding)
                found_props.append(element)
            elif prop.tag in (
                    xmlutils.make_clark("C:calendar-data"),
                    xmlutils.make_clark("CR:address-data")):
                element.text = item.serialize()

                if (expand is not None) and item.component_name == 'VEVENT':
                    starts = expand.get('start')
                    ends = expand.get('end')
                    if (starts is None) or (ends is None):
                        return client.FORBIDDEN, \
                            xmlutils.webdav_error("C:expand")
                    start = datetime.datetime.strptime(
                        starts, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    end = datetime.datetime.strptime(
                        ends, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)

                    # time-range filtering was deferred above so it can be
                    # applied to the expanded instances instead of the master.
                    time_range_start = None
                    time_range_end = None
                    if time_range_element is not None:
                        time_range_start, time_range_end = radicale_filter.parse_time_range(time_range_element)

                    (expanded_element, n_vev) = _expand(
                        element=element, item=copy.copy(item),
                        start=start, end=end,
                        time_range_start=time_range_start, time_range_end=time_range_end,
                        max_occurrence=max_occurrence,
                    )
                    if n_vev == 0:
                        logger.debug("No VEVENTs found after expansion for %r, skipping", item.href)
                        continue
                    n_vevents += n_vev
                    found_props.append(expanded_element)
                else:
                    found_props.append(element)
                    if hasattr(item.vobject_item, "vevent_list"):
                        n_vevents += len(item.vobject_item.vevent_list)

                # Avoid DoS with too many events
                if max_occurrence and n_vevents > max_occurrence:
                    raise ValueError("REPORT occurrences limit of {} hit"
                                     .format(max_occurrence))
            else:
                not_found_props.append(element)

        assert item.href
        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        if found_props or not_found_props:
            multistatus.append(xml_item_response(
                base_prefix, uri, found_props=found_props,
                not_found_props=not_found_props, found_item=True))

    return client.MULTI_STATUS, multistatus
def _expand(
        element: ET.Element,
        item: radicale_item.Item,
        start: datetime.datetime,
        end: datetime.datetime,
        time_range_start: Optional[datetime.datetime] = None,
        time_range_end: Optional[datetime.datetime] = None,
        max_occurrence: int = 0,
) -> Tuple[ET.Element, int]:
    """Expand a recurring VEVENT into individual instances.

    Fills ``element.text`` with the serialized component holding one VEVENT
    per recurrence between *start* and *end* (RECURRENCE-ID set, recurrence
    rules and VTIMEZONEs stripped), optionally post-filtered by
    ``time_range_start``/``time_range_end``.  Returns ``(element, count)``
    where ``count`` is the number of VEVENTs kept.  Raises ``ValueError``
    when *max_occurrence* (> 0) is exceeded.
    """
    vevent_component: vobject.base.Component = copy.copy(item.vobject_item)
    logger.info("Expanding event %s", item.href)
    logger.debug(f"Expand range: {start} to {end}")
    logger.debug(f"Time range: {time_range_start} to {time_range_end}")

    # Split the vevents included in the component into one that contains the
    # recurrence information and others that contain a recurrence id to
    # override instances.
    base_vevent, vevents_overridden = _split_overridden_vevents(vevent_component)

    dt_format = '%Y%m%dT%H%M%SZ'
    all_day_event = False

    if type(base_vevent.dtstart.value) is datetime.date:
        # If an event comes to us with a dtstart specified as a date
        # then in the response we return the date, not datetime
        dt_format = '%Y%m%d'
        all_day_event = True
        # In case of dates, we need to remove timezone information since
        # rruleset.between computes with datetimes without timezone information
        start = start.replace(tzinfo=None)
        end = end.replace(tzinfo=None)
        if time_range_start is not None and time_range_end is not None:
            time_range_start = time_range_start.replace(tzinfo=None)
            time_range_end = time_range_end.replace(tzinfo=None)

    for vevent in vevents_overridden:
        _strip_single_event(vevent, dt_format)

    # Event length: either explicit DTEND or a (validated) DURATION.
    duration = None
    if hasattr(base_vevent, "dtend"):
        duration = base_vevent.dtend.value - base_vevent.dtstart.value
    elif hasattr(base_vevent, "duration"):
        try:
            duration = base_vevent.duration.value
            if duration.total_seconds() <= 0:
                logger.warning("Invalid DURATION: %s", duration)
                duration = None
        except (AttributeError, TypeError) as e:
            logger.warning("Failed to parse DURATION: %s", e)
            duration = None

    # Generate EXDATE to remove from expansion range
    exdates_set: set[datetime.datetime] = set()
    if hasattr(base_vevent, 'exdate'):
        exdates = base_vevent.exdate.value
        if not isinstance(exdates, list):
            exdates = [exdates]
        # Normalize EXDATEs the same way recurrences are normalized below so
        # the membership test compares like with like.
        exdates_set = {
            exdate.astimezone(datetime.timezone.utc) if isinstance(exdate, datetime.datetime)
            else datetime.datetime.fromordinal(exdate.toordinal()).replace(tzinfo=None)
            for exdate in exdates
        }
        logger.debug("EXDATE values: %s", exdates_set)

    events_for_filtering = vevents_overridden
    rruleset = None
    if hasattr(base_vevent, 'rrule'):
        rruleset = base_vevent.getrruleset()
    else:
        # if event does not have rrule, only include base event
        events_for_filtering = [base_vevent]

    filtered_vevents = []
    if rruleset:
        # This function uses datetimes internally without timezone info for dates
        # A vobject rruleset is for the event dtstart.
        # Expanded over a given time range this will not include
        # events which started before the time range but are still
        # ongoing at the start of the range
        # To accomodate this, reduce the start time by the duration of
        # the event. If this introduces an extra reccurence point then
        # that event should be included as it is still ongoing. If no
        # extra point is generated then it was a no-op.
        rstart = start - duration if duration and duration.total_seconds() > 0 else start
        recurrences = rruleset.between(rstart, end, inc=True, count=max_occurrence)
        if max_occurrence and len(recurrences) >= max_occurrence:
            # this shouldn't be > and if it's == then assume a limit
            # was hit and ignore that maybe some would be filtered out
            # by EXDATE etc. This is anti-DoS, not precise limits
            raise ValueError("REPORT occurrences limit of {} hit"
                             .format(max_occurrence))

        _strip_component(vevent_component)
        _strip_single_event(base_vevent, dt_format)

        # Index into the sorted overridden instances; advanced by
        # _find_overridden so each override is matched at most once.
        i_overridden = 0

        for recurrence_dt in recurrences:
            recurrence_utc = recurrence_dt if all_day_event else recurrence_dt.astimezone(datetime.timezone.utc)
            logger.debug("Processing recurrence: %s (all_day_event: %s)", recurrence_utc, all_day_event)

            # Apply time-range filter
            if time_range_start is not None and time_range_end is not None:
                dtstart = recurrence_utc
                dtend = dtstart + duration if duration else dtstart
                # Start includes the time, end does not
                if not (dtstart <= time_range_end and dtend > time_range_start):
                    logger.debug("Recurrence %s filtered out by time-range", recurrence_utc)
                    continue

            # Check exdate
            if recurrence_utc in exdates_set:
                logger.debug("Recurrence %s excluded by EXDATE", recurrence_utc)
                continue

            # Check for overridden instances
            i_overridden, vevent = _find_overridden(i_overridden, vevents_overridden, recurrence_utc, dt_format)

            if not vevent:
                # Create new instance from recurrence
                vevent = copy.deepcopy(base_vevent)

                # For all day events, the system timezone may influence the
                # results, so use recurrence_dt
                recurrence_id = recurrence_dt if all_day_event else recurrence_utc
                logger.debug("Creating new VEVENT with RECURRENCE-ID: %s", recurrence_id)
                vevent.recurrence_id = ContentLine(
                    name='RECURRENCE-ID',
                    value=recurrence_id, params={}
                )
                _convert_to_utc(vevent, 'recurrence_id', dt_format)
                vevent.dtstart = ContentLine(
                    name='DTSTART',
                    value=recurrence_id.strftime(dt_format), params={}
                )
                # if there is a DTEND, override it. Duration does not need changing
                if hasattr(vevent, "dtend"):
                    vevent.dtend = ContentLine(
                        name='DTEND',
                        value=(recurrence_id + duration).strftime(dt_format), params={}
                    )

            filtered_vevents.append(vevent)

    # Filter overridden and non-recurring events
    if time_range_start is not None and time_range_end is not None:
        for vevent in events_for_filtering:
            dtstart = vevent.dtstart.value
            # Handle string values for DTSTART/DTEND
            # (overridden events were already stripped to strings above)
            if isinstance(dtstart, str):
                try:
                    dtstart = datetime.datetime.strptime(dtstart, dt_format)
                    if all_day_event:
                        dtstart = dtstart.date()
                except ValueError as e:
                    logger.warning("Invalid DTSTART format: %s, error: %s", dtstart, e)
                    continue
            dtend = dtstart + duration if duration else dtstart
            logger.debug(
                "Filtering VEVENT with DTSTART: %s (type: %s), DTEND: %s (type: %s)",
                dtstart, type(dtstart), dtend, type(dtend))
            # Convert to datetime for comparison
            if all_day_event and isinstance(dtstart, datetime.date) and not isinstance(dtstart, datetime.datetime):
                dtstart = datetime.datetime.fromordinal(dtstart.toordinal()).replace(tzinfo=None)
                dtend = datetime.datetime.fromordinal(dtend.toordinal()).replace(tzinfo=None)
            elif not all_day_event and isinstance(dtstart, datetime.datetime) \
                    and isinstance(dtend, datetime.datetime):
                dtstart = dtstart.replace(tzinfo=datetime.timezone.utc)
                dtend = dtend.replace(tzinfo=datetime.timezone.utc)
            else:
                logger.warning("Unexpected DTSTART/DTEND type: dtstart=%s, dtend=%s", type(dtstart), type(dtend))
                continue
            if dtstart < time_range_end and dtend > time_range_start:
                if vevent not in filtered_vevents:  # Avoid duplicates
                    logger.debug("VEVENT passed time-range filter: %s", dtstart)
                    filtered_vevents.append(vevent)
            else:
                logger.debug("VEVENT filtered out: %s", dtstart)

    # Rebuild component
    if not filtered_vevents:
        element.text = ""
        return element, 0
    else:
        vevent_component.vevent_list = filtered_vevents
        logger.debug("lbt: vevent_component %s", vevent_component)
        element.text = vevent_component.serialize()
        return element, len(filtered_vevents)
  474. def _convert_timezone(vevent: vobject.icalendar.RecurringComponent,
  475. name_prop: str,
  476. name_content_line: str):
  477. prop = getattr(vevent, name_prop, None)
  478. if prop:
  479. if type(prop.value) is datetime.date:
  480. date_time = datetime.datetime.fromordinal(
  481. prop.value.toordinal()
  482. ).replace(tzinfo=datetime.timezone.utc)
  483. else:
  484. date_time = prop.value.astimezone(datetime.timezone.utc)
  485. setattr(vevent, name_prop, ContentLine(name=name_content_line, value=date_time, params=[]))
  486. def _convert_to_utc(vevent: vobject.icalendar.RecurringComponent,
  487. name_prop: str,
  488. dt_format: str):
  489. prop = getattr(vevent, name_prop, None)
  490. if prop:
  491. setattr(vevent, name_prop, ContentLine(name=prop.name, value=prop.value.strftime(dt_format), params=[]))
  492. def _strip_single_event(vevent: vobject.icalendar.RecurringComponent, dt_format: str) -> None:
  493. _convert_timezone(vevent, 'dtstart', 'DTSTART')
  494. _convert_timezone(vevent, 'dtend', 'DTEND')
  495. _convert_timezone(vevent, 'recurrence_id', 'RECURRENCE-ID')
  496. # There is something strange behaviour during serialization native datetime, so converting manually
  497. _convert_to_utc(vevent, 'dtstart', dt_format)
  498. _convert_to_utc(vevent, 'dtend', dt_format)
  499. _convert_to_utc(vevent, 'recurrence_id', dt_format)
  500. try:
  501. delattr(vevent, 'rrule')
  502. delattr(vevent, 'exdate')
  503. delattr(vevent, 'exrule')
  504. delattr(vevent, 'rdate')
  505. except AttributeError:
  506. pass
  507. def _strip_component(vevent: vobject.base.Component) -> None:
  508. timezones_to_remove = []
  509. for component in vevent.components():
  510. if component.name == 'VTIMEZONE':
  511. timezones_to_remove.append(component)
  512. for timezone in timezones_to_remove:
  513. vevent.remove(timezone)
  514. def _split_overridden_vevents(
  515. component: vobject.base.Component,
  516. ) -> Tuple[
  517. vobject.icalendar.RecurringComponent,
  518. List[vobject.icalendar.RecurringComponent]
  519. ]:
  520. vevent_recurrence = None
  521. vevents_overridden = []
  522. for vevent in component.vevent_list:
  523. if hasattr(vevent, 'recurrence_id'):
  524. vevents_overridden += [vevent]
  525. elif vevent_recurrence:
  526. raise ValueError(
  527. f"component with UID {vevent.uid} "
  528. f"has more than one vevent with recurrence information"
  529. )
  530. else:
  531. vevent_recurrence = vevent
  532. if vevent_recurrence:
  533. return (
  534. vevent_recurrence, sorted(
  535. vevents_overridden,
  536. key=lambda vevent: vevent.recurrence_id.value
  537. )
  538. )
  539. else:
  540. raise ValueError(
  541. f"component with UID {vevent.uid} "
  542. f"does not have a vevent without a recurrence_id"
  543. )
  544. def _find_overridden(
  545. start: int,
  546. vevents: List[vobject.icalendar.RecurringComponent],
  547. dt: datetime.datetime,
  548. dt_format: str
  549. ) -> Tuple[int, Optional[vobject.icalendar.RecurringComponent]]:
  550. for i in range(start, len(vevents)):
  551. dt_event = datetime.datetime.strptime(
  552. vevents[i].recurrence_id.value,
  553. dt_format
  554. ).replace(tzinfo=datetime.timezone.utc)
  555. if dt_event == dt:
  556. return (i + 1, vevents[i])
  557. return (start, None)
  558. def xml_item_response(base_prefix: str, href: str,
  559. found_props: Sequence[ET.Element] = (),
  560. not_found_props: Sequence[ET.Element] = (),
  561. found_item: bool = True) -> ET.Element:
  562. response = ET.Element(xmlutils.make_clark("D:response"))
  563. href_element = ET.Element(xmlutils.make_clark("D:href"))
  564. href_element.text = xmlutils.make_href(base_prefix, href)
  565. response.append(href_element)
  566. if found_item:
  567. for code, props in ((200, found_props), (404, not_found_props)):
  568. if props:
  569. propstat = ET.Element(xmlutils.make_clark("D:propstat"))
  570. status = ET.Element(xmlutils.make_clark("D:status"))
  571. status.text = xmlutils.make_response(code)
  572. prop_element = ET.Element(xmlutils.make_clark("D:prop"))
  573. for prop in props:
  574. prop_element.append(prop)
  575. propstat.append(prop_element)
  576. propstat.append(status)
  577. response.append(propstat)
  578. else:
  579. status = ET.Element(xmlutils.make_clark("D:status"))
  580. status.text = xmlutils.make_response(404)
  581. response.append(status)
  582. return response
def retrieve_items(
        base_prefix: str, path: str, collection: storage.BaseCollection,
        hreferences: Iterable[str], filters: Sequence[ET.Element],
        multistatus: ET.Element) -> Iterator[Tuple[radicale_item.Item, bool]]:
    """Retrieves all items that are referenced in ``hreferences`` from
    ``collection`` and adds 404 responses for missing and invalid items
    to ``multistatus``.

    Yields ``(item, filters_matched)`` tuples: items looked up by name are
    always yielded with ``False`` (caller must still apply ``filters``);
    items obtained via ``collection.get_filtered`` carry the storage
    layer's match flag.
    """
    collection_requested = False

    def get_names() -> Iterator[str]:
        """Extracts all names from references in ``hreferences`` and adds
        404 responses for invalid references to ``multistatus``.

        If the whole collections is referenced ``collection_requested``
        gets set to ``True``."""
        nonlocal collection_requested
        for hreference in hreferences:
            try:
                name = pathutils.name_from_path(hreference, collection)
            except ValueError as e:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r: %s", hreference, path, e)
                response = xml_item_response(base_prefix, hreference,
                                             found_item=False)
                multistatus.append(response)
                continue
            if name:
                # Reference is an item
                yield name
            else:
                # Reference is a collection
                collection_requested = True

    for name, item in collection.get_multi(get_names()):
        if not item:
            uri = pathutils.unstrip_path(posixpath.join(collection.path, name))
            response = xml_item_response(base_prefix, uri, found_item=False)
            multistatus.append(response)
        else:
            yield item, False
    # ``collection_requested`` is only final once get_multi() above has
    # fully consumed the get_names() generator.
    if collection_requested:
        yield from collection.get_filtered(filters)
def test_filter(collection_tag: str, item: radicale_item.Item,
                filter_: ET.Element) -> bool:
    """Match an item against a filter.

    Dispatches on the collection type: CalDAV comp-filter matching for
    ``VCALENDAR``, CardDAV prop-filter matching (with ``anyof``/``allof``
    test semantics) for ``VADDRESSBOOK``.  Raises ``ValueError`` for
    malformed or unsupported filters.
    """
    # NOTE(review): ``"C:%s" % filter_`` interpolates the Element's repr(),
    # so this comparison can never be equal and the branch is effectively
    # guarded by ``collection_tag`` alone.  Presumably ``filter_.tag`` was
    # intended — confirm before changing, since the current form works.
    if (collection_tag == "VCALENDAR" and
            filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
        if len(filter_) == 0:
            return True
        if len(filter_) > 1:
            raise ValueError("Filter with %d children" % len(filter_))
        if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
            raise ValueError("Unexpected %r in filter" % filter_[0].tag)
        return radicale_filter.comp_match(item, filter_[0])
    # Same repr() quirk as above for the CardDAV branch.
    if (collection_tag == "VADDRESSBOOK" and
            filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
        for child in filter_:
            if child.tag != xmlutils.make_clark("CR:prop-filter"):
                raise ValueError("Unexpected %r in filter" % child.tag)
        test = filter_.get("test", "anyof")
        if test == "anyof":
            return any(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        if test == "allof":
            return all(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        raise ValueError("Unsupported filter test: %r" % test)
    raise ValueError("Unsupported filter %r for %r" %
                     (filter_.tag, collection_tag))
class ApplicationPartReport(ApplicationBase):
    """REPORT request handling part of the WSGI application."""

    def do_REPORT(self, environ: types.WSGIEnviron, base_prefix: str,
                  path: str, user: str) -> types.WSGIResponse:
        """Manage REPORT request.

        Checks access rights, parses the XML body, resolves the addressed
        item/collection under a read lock and dispatches to
        ``free_busy_report`` (text/calendar response) or ``xml_report``
        (XML multistatus response).
        """
        access = Access(self._rights, user, path)
        if not access.check("r"):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning("Bad REPORT request on %r: %s", path, e,
                           exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # The lock lives on an ExitStack so the report functions can release
        # it early (via ``lock_stack.close``) once everything has been read
        # from storage.
        with contextlib.ExitStack() as lock_stack:
            lock_stack.enter_context(self._storage.acquire_lock("r", user))
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if not access.check("r", item):
                return httputils.NOT_ALLOWED
            # A REPORT on a single item is answered from its parent collection.
            if isinstance(item, storage.BaseCollection):
                collection = item
            else:
                assert item.collection is not None
                collection = item.collection

            max_occurrence = self.configuration.get("reporting", "max_freebusy_occurrence")
            if xml_content is not None and \
                    xml_content.tag == xmlutils.make_clark("C:free-busy-query"):
                try:
                    status, body = free_busy_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close, max_occurrence)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/calendar; charset=%s" % self._encoding}
                return status, headers, str(body)
            else:
                try:
                    status, xml_answer = xml_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close, max_occurrence)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
                return status, headers, self._xml_response(xml_answer)