# report.py
  1. # This file is part of Radicale - CalDAV and CardDAV server
  2. # Copyright © 2008 Nicolas Kandel
  3. # Copyright © 2008 Pascal Halter
  4. # Copyright © 2008-2017 Guillaume Ayoub
  5. # Copyright © 2017-2021 Unrud <unrud@outlook.com>
  6. # Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
  7. # Copyright © 2024-2024 Ray <ray@react0r.com>
  8. # Copyright © 2024-2025 Georgiy <metallerok@gmail.com>
  9. # Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
  10. # Copyright © 2025-2025 David Greaves <david@dgreaves.com>
  11. #
  12. # This library is free software: you can redistribute it and/or modify
  13. # it under the terms of the GNU General Public License as published by
  14. # the Free Software Foundation, either version 3 of the License, or
  15. # (at your option) any later version.
  16. #
  17. # This library is distributed in the hope that it will be useful,
  18. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  19. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20. # GNU General Public License for more details.
  21. #
  22. # You should have received a copy of the GNU General Public License
  23. # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  24. import contextlib
  25. import copy
  26. import datetime
  27. import posixpath
  28. import socket
  29. import xml.etree.ElementTree as ET
  30. from http import client
  31. from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
  32. Tuple, Union)
  33. from urllib.parse import unquote, urlparse
  34. import vobject
  35. import vobject.base
  36. from vobject.base import ContentLine
  37. import radicale.item as radicale_item
  38. from radicale import httputils, pathutils, storage, types, xmlutils
  39. from radicale.app.base import Access, ApplicationBase
  40. from radicale.item import filter as radicale_filter
  41. from radicale.log import logger
  42. def free_busy_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
  43. collection: storage.BaseCollection, encoding: str,
  44. unlock_storage_fn: Callable[[], None],
  45. max_occurrence: int
  46. ) -> Tuple[int, Union[ET.Element, str]]:
  47. # NOTE: this function returns both an Element and a string because
  48. # free-busy reports are an edge-case on the return type according
  49. # to the spec.
  50. multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
  51. if xml_request is None:
  52. return client.MULTI_STATUS, multistatus
  53. root = xml_request
  54. if (root.tag == xmlutils.make_clark("C:free-busy-query") and
  55. collection.tag != "VCALENDAR"):
  56. logger.warning("Invalid REPORT method %r on %r requested",
  57. xmlutils.make_human_tag(root.tag), path)
  58. return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
  59. time_range_element = root.find(xmlutils.make_clark("C:time-range"))
  60. assert isinstance(time_range_element, ET.Element)
  61. # Build a single filter from the free busy query for retrieval
  62. # TODO: filter for VFREEBUSY in additional to VEVENT but
  63. # test_filter doesn't support that yet.
  64. vevent_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  65. attrib={'name': 'VEVENT'})
  66. vevent_cf_element.append(time_range_element)
  67. vcalendar_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  68. attrib={'name': 'VCALENDAR'})
  69. vcalendar_cf_element.append(vevent_cf_element)
  70. filter_element = ET.Element(xmlutils.make_clark("C:filter"))
  71. filter_element.append(vcalendar_cf_element)
  72. filters = (filter_element,)
  73. # First pull from storage
  74. retrieved_items = list(collection.get_filtered(filters))
  75. # !!! Don't access storage after this !!!
  76. unlock_storage_fn()
  77. cal = vobject.iCalendar()
  78. collection_tag = collection.tag
  79. while retrieved_items:
  80. # Second filtering before evaluating occurrences.
  81. # ``item.vobject_item`` might be accessed during filtering.
  82. # Don't keep reference to ``item``, because VObject requires a lot of
  83. # memory.
  84. item, filter_matched = retrieved_items.pop(0)
  85. if not filter_matched:
  86. try:
  87. if not test_filter(collection_tag, item, filter_element):
  88. continue
  89. except ValueError as e:
  90. raise ValueError("Failed to free-busy filter item %r from %r: %s" %
  91. (item.href, collection.path, e)) from e
  92. except Exception as e:
  93. raise RuntimeError("Failed to free-busy filter item %r from %r: %s" %
  94. (item.href, collection.path, e)) from e
  95. fbtype = None
  96. if item.component_name == 'VEVENT':
  97. transp = getattr(item.vobject_item.vevent, 'transp', None)
  98. if transp and transp.value != 'OPAQUE':
  99. continue
  100. status = getattr(item.vobject_item.vevent, 'status', None)
  101. if not status or status.value == 'CONFIRMED':
  102. fbtype = 'BUSY'
  103. elif status.value == 'CANCELLED':
  104. fbtype = 'FREE'
  105. elif status.value == 'TENTATIVE':
  106. fbtype = 'BUSY-TENTATIVE'
  107. else:
  108. # Could do fbtype = status.value for x-name, I prefer this
  109. fbtype = 'BUSY'
  110. # TODO: coalesce overlapping periods
  111. if max_occurrence > 0:
  112. n_occurrences = max_occurrence+1
  113. else:
  114. n_occurrences = 0
  115. occurrences = radicale_filter.time_range_fill(item.vobject_item,
  116. time_range_element,
  117. "VEVENT",
  118. n=n_occurrences)
  119. if len(occurrences) >= max_occurrence:
  120. raise ValueError("FREEBUSY occurrences limit of {} hit"
  121. .format(max_occurrence))
  122. for occurrence in occurrences:
  123. vfb = cal.add('vfreebusy')
  124. vfb.add('dtstamp').value = item.vobject_item.vevent.dtstamp.value
  125. vfb.add('dtstart').value, vfb.add('dtend').value = occurrence
  126. if fbtype:
  127. vfb.add('fbtype').value = fbtype
  128. return (client.OK, cal.serialize())
def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
               collection: storage.BaseCollection, encoding: str,
               unlock_storage_fn: Callable[[], None]
               ) -> Tuple[int, ET.Element]:
    """Read and answer REPORT requests that return XML.

    Read rfc3253-3.6 for info.

    Handles multiget (calendar/addressbook), sync-collection and
    query-style reports; ``unlock_storage_fn`` is called as soon as all
    required data has been read from storage so the lock is held as
    briefly as possible.
    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        # An empty request body yields an empty multistatus response.
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
                    xmlutils.make_clark("D:principal-property-search"),
                    xmlutils.make_clark("D:expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that do not match the collection type.
    if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
            collection.tag != "VCALENDAR" or
            root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
            collection.tag != "VADDRESSBOOK" or
            root.tag == xmlutils.make_clark("D:sync-collection") and
            collection.tag not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
    props: Union[ET.Element, List]
    if root.find(xmlutils.make_clark("D:prop")) is not None:
        props = root.find(xmlutils.make_clark("D:prop"))  # type: ignore[assignment]
    else:
        props = []
    hreferences: Iterable[str]
    if root.tag in (
            xmlutils.make_clark("C:calendar-multiget"),
            xmlutils.make_clark("CR:addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_clark("D:href")):
            temp_url_path = urlparse(href_element.text).path
            assert isinstance(temp_url_path, str)
            href_path = pathutils.sanitize_path(unquote(temp_url_path))
            if (href_path + "/").startswith(base_prefix + "/"):
                # Strip the mount prefix so the path is collection-relative.
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == xmlutils.make_clark("D:sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_clark("D:sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            # client.CONFLICT doesn't work with some clients (e.g. InfCloud)
            return (client.FORBIDDEN,
                    xmlutils.webdav_error("D:valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        # Query-style report: the collection itself is the target.
        hreferences = (path,)
    filters = (
        root.findall(xmlutils.make_clark("C:filter")) +
        root.findall(xmlutils.make_clark("CR:filter")))
    expand = root.find(".//" + xmlutils.make_clark("C:expand"))
    # if we have expand prop we use "filter (except time range) -> expand -> filter (only time range)" approach
    time_range_element = None
    main_filters = []
    for filter_ in filters:
        # extract time-range filter for processing after main filters
        # for expand request
        time_range_element = filter_.find(".//" + xmlutils.make_clark("C:time-range"))
        if expand is None or time_range_element is None:
            main_filters.append(filter_)
    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(
        base_prefix, path, collection, hreferences, main_filters, multistatus))
    collection_tag = collection.tag
    # !!! Don't access storage after this !!!
    unlock_storage_fn()
    # ``while``+``pop(0)`` instead of ``for`` so each item is dropped as soon
    # as it has been handled (see memory note below).
    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(test_filter(collection_tag, item, filter_)
                           for filter_ in main_filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e
        found_props = []
        not_found_props = []
        for prop in props:
            element = ET.Element(prop.tag)
            if prop.tag == xmlutils.make_clark("D:getetag"):
                element.text = item.etag
                found_props.append(element)
            elif prop.tag == xmlutils.make_clark("D:getcontenttype"):
                element.text = xmlutils.get_content_type(item, encoding)
                found_props.append(element)
            elif prop.tag in (
                    xmlutils.make_clark("C:calendar-data"),
                    xmlutils.make_clark("CR:address-data")):
                element.text = item.serialize()
                if (expand is not None) and item.component_name == 'VEVENT':
                    # rfc4791-9.6.5: expand recurrences into instances.
                    starts = expand.get('start')
                    ends = expand.get('end')
                    if (starts is None) or (ends is None):
                        return client.FORBIDDEN, \
                            xmlutils.webdav_error("C:expand")
                    start = datetime.datetime.strptime(
                        starts, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    end = datetime.datetime.strptime(
                        ends, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    time_range_start = None
                    time_range_end = None
                    if time_range_element is not None:
                        time_range_start, time_range_end = radicale_filter.parse_time_range(time_range_element)
                    # copy.copy so expansion can't mutate the stored item.
                    expanded_element = _expand(
                        element=element, item=copy.copy(item),
                        start=start, end=end,
                        time_range_start=time_range_start, time_range_end=time_range_end,
                    )
                    found_props.append(expanded_element)
                else:
                    found_props.append(element)
            else:
                not_found_props.append(element)
        assert item.href
        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        multistatus.append(xml_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))
    return client.MULTI_STATUS, multistatus
def _expand(
    element: ET.Element,
    item: radicale_item.Item,
    start: datetime.datetime,
    end: datetime.datetime,
    time_range_start: Optional[datetime.datetime] = None,
    time_range_end: Optional[datetime.datetime] = None,
) -> ET.Element:
    """Expand a recurring VEVENT into individual instances.

    Sets ``element.text`` to the serialized calendar holding one VEVENT
    per recurrence between *start* and *end*, honouring EXDATEs,
    overridden instances (RECURRENCE-ID) and the optional time-range
    filter given by *time_range_start*/*time_range_end*.  Returns
    *element*; its text is empty when no instance survives.
    """
    vevent_component: vobject.base.Component = copy.copy(item.vobject_item)
    logger.info("Expanding event %s", item.href)
    # Split the vevents included in the component into one that contains the
    # recurrence information and others that contain a recurrence id to
    # override instances.
    vevent_recurrence, vevents_overridden = _split_overridden_vevents(vevent_component)
    dt_format = '%Y%m%dT%H%M%SZ'
    all_day_event = False
    if type(vevent_recurrence.dtstart.value) is datetime.date:
        # If an event comes to us with a dtstart specified as a date
        # then in the response we return the date, not datetime
        dt_format = '%Y%m%d'
        all_day_event = True
        # In case of dates, we need to remove timezone information since
        # rruleset.between computes with datetimes without timezone information
        start = start.replace(tzinfo=None)
        end = end.replace(tzinfo=None)
        if time_range_start is not None and time_range_end is not None:
            time_range_start = time_range_start.replace(tzinfo=None)
            time_range_end = time_range_end.replace(tzinfo=None)
    for vevent in vevents_overridden:
        _strip_single_event(vevent, dt_format)
    # Event length: prefer DTEND-DTSTART, fall back to DURATION.
    duration = None
    if hasattr(vevent_recurrence, "dtend"):
        duration = vevent_recurrence.dtend.value - vevent_recurrence.dtstart.value
    elif hasattr(vevent_recurrence, "duration"):
        try:
            duration = vevent_recurrence.duration.value
            if duration.total_seconds() <= 0:
                logger.warning("Invalid DURATION: %s", duration)
                duration = None
        except (AttributeError, TypeError) as e:
            logger.warning("Failed to parse DURATION: %s", e)
            duration = None
    # Generate EXDATE to remove from expansion range
    exdates_set: set[datetime.datetime] = set()
    if hasattr(vevent_recurrence, 'exdate'):
        exdates = vevent_recurrence.exdate.value
        if not isinstance(exdates, list):
            exdates = [exdates]
        # Normalize to the same form as the recurrence values below:
        # aware UTC datetimes, or naive datetimes for all-day dates.
        exdates_set = {
            exdate.astimezone(datetime.timezone.utc) if isinstance(exdate, datetime.datetime)
            else datetime.datetime.fromordinal(exdate.toordinal()).replace(tzinfo=None)
            for exdate in exdates
        }
        logger.debug("EXDATE values: %s", exdates_set)
    rruleset = None
    if hasattr(vevent_recurrence, 'rrule'):
        rruleset = vevent_recurrence.getrruleset()
    filtered_vevents = []
    if rruleset:
        # This function uses datetimes internally without timezone info for dates
        # A vobject rruleset is for the event dtstart.
        # Expanded over a given time range this will not include
        # events which started before the time range but are still
        # ongoing at the start of the range
        # To accomodate this, reduce the start time by the duration of
        # the event. If this introduces an extra reccurence point then
        # that event should be included as it is still ongoing. If no
        # extra point is generated then it was a no-op.
        rstart = start - duration if duration and duration.total_seconds() > 0 else start
        recurrences = rruleset.between(rstart, end, inc=True)
        _strip_component(vevent_component)
        _strip_single_event(vevent_recurrence, dt_format)
        # ``vevents_overridden`` is sorted, so overridden lookups resume
        # from the previous match via ``i_overridden``.
        i_overridden = 0
        for recurrence_dt in recurrences:
            recurrence_utc = recurrence_dt if all_day_event else recurrence_dt.astimezone(datetime.timezone.utc)
            logger.debug("Processing recurrence: %s (all_day_event: %s)", recurrence_utc, all_day_event)
            # Apply time-range filter
            if time_range_start is not None and time_range_end is not None:
                dtstart = recurrence_utc
                dtend = dtstart + duration if duration else dtstart
                # Start includes the time, end does not
                if not (dtstart <= time_range_end and dtend > time_range_start):
                    logger.debug("Recurrence %s filtered out by time-range", recurrence_utc)
                    continue
            # Check exdate
            if recurrence_utc in exdates_set:
                logger.debug("Recurrence %s excluded by EXDATE", recurrence_utc)
                continue
            # Check for overridden instances
            i_overridden, vevent = _find_overridden(i_overridden, vevents_overridden, recurrence_utc, dt_format)
            if not vevent:
                # Create new instance from recurrence
                vevent = copy.deepcopy(vevent_recurrence)
                # For all day events, the system timezone may influence the
                # results, so use recurrence_dt
                recurrence_id = recurrence_dt if all_day_event else recurrence_utc
                logger.debug("Creating new VEVENT with RECURRENCE-ID: %s", recurrence_id)
                vevent.recurrence_id = ContentLine(
                    name='RECURRENCE-ID',
                    value=recurrence_id, params={}
                )
                _convert_to_utc(vevent, 'recurrence_id', dt_format)
                vevent.dtstart = ContentLine(
                    name='DTSTART',
                    value=recurrence_id.strftime(dt_format), params={}
                )
                # if there is a DTEND, override it. Duration does not need changing
                if hasattr(vevent, "dtend"):
                    vevent.dtend = ContentLine(
                        name='DTEND',
                        value=(recurrence_id + duration).strftime(dt_format), params={}
                    )
            filtered_vevents.append(vevent)
    # Filter overridden and recurrence base events
    if time_range_start is not None and time_range_end is not None:
        for vevent in vevents_overridden:
            # DTSTART was serialized to a string by _strip_single_event above.
            dtstart = vevent.dtstart.value
            # Handle string values for DTSTART/DTEND
            if isinstance(dtstart, str):
                try:
                    dtstart = datetime.datetime.strptime(dtstart, dt_format)
                    if all_day_event:
                        dtstart = dtstart.date()
                except ValueError as e:
                    logger.warning("Invalid DTSTART format: %s, error: %s", dtstart, e)
                    continue
            dtend = dtstart + duration if duration else dtstart
            logger.debug(
                "Filtering VEVENT with DTSTART: %s (type: %s), DTEND: %s (type: %s)",
                dtstart, type(dtstart), dtend, type(dtend))
            # Convert to datetime for comparison
            if all_day_event and isinstance(dtstart, datetime.date) and not isinstance(dtstart, datetime.datetime):
                dtstart = datetime.datetime.fromordinal(dtstart.toordinal()).replace(tzinfo=None)
                dtend = datetime.datetime.fromordinal(dtend.toordinal()).replace(tzinfo=None)
            elif not all_day_event and isinstance(dtstart, datetime.datetime) \
                    and isinstance(dtend, datetime.datetime):
                dtstart = dtstart.replace(tzinfo=datetime.timezone.utc)
                dtend = dtend.replace(tzinfo=datetime.timezone.utc)
            else:
                logger.warning("Unexpected DTSTART/DTEND type: dtstart=%s, dtend=%s", type(dtstart), type(dtend))
                continue
            if dtstart < time_range_end and dtend > time_range_start:
                if vevent not in filtered_vevents:  # Avoid duplicates
                    logger.debug("VEVENT passed time-range filter: %s", dtstart)
                    filtered_vevents.append(vevent)
            else:
                logger.debug("VEVENT filtered out: %s", dtstart)
    # Rebuild component
    if not filtered_vevents:
        element.text = ""
        return element
    else:
        vevent_component.vevent_list = filtered_vevents
        logger.debug("lbt: vevent_component %s", vevent_component)
        element.text = vevent_component.serialize()
        return element
  441. def _convert_timezone(vevent: vobject.icalendar.RecurringComponent,
  442. name_prop: str,
  443. name_content_line: str):
  444. prop = getattr(vevent, name_prop, None)
  445. if prop:
  446. if type(prop.value) is datetime.date:
  447. date_time = datetime.datetime.fromordinal(
  448. prop.value.toordinal()
  449. ).replace(tzinfo=datetime.timezone.utc)
  450. else:
  451. date_time = prop.value.astimezone(datetime.timezone.utc)
  452. setattr(vevent, name_prop, ContentLine(name=name_content_line, value=date_time, params=[]))
  453. def _convert_to_utc(vevent: vobject.icalendar.RecurringComponent,
  454. name_prop: str,
  455. dt_format: str):
  456. prop = getattr(vevent, name_prop, None)
  457. if prop:
  458. setattr(vevent, name_prop, ContentLine(name=prop.name, value=prop.value.strftime(dt_format), params=[]))
  459. def _strip_single_event(vevent: vobject.icalendar.RecurringComponent, dt_format: str) -> None:
  460. _convert_timezone(vevent, 'dtstart', 'DTSTART')
  461. _convert_timezone(vevent, 'dtend', 'DTEND')
  462. _convert_timezone(vevent, 'recurrence_id', 'RECURRENCE-ID')
  463. # There is something strange behaviour during serialization native datetime, so converting manually
  464. _convert_to_utc(vevent, 'dtstart', dt_format)
  465. _convert_to_utc(vevent, 'dtend', dt_format)
  466. _convert_to_utc(vevent, 'recurrence_id', dt_format)
  467. try:
  468. delattr(vevent, 'rrule')
  469. delattr(vevent, 'exdate')
  470. delattr(vevent, 'exrule')
  471. delattr(vevent, 'rdate')
  472. except AttributeError:
  473. pass
  474. def _strip_component(vevent: vobject.base.Component) -> None:
  475. timezones_to_remove = []
  476. for component in vevent.components():
  477. if component.name == 'VTIMEZONE':
  478. timezones_to_remove.append(component)
  479. for timezone in timezones_to_remove:
  480. vevent.remove(timezone)
  481. def _split_overridden_vevents(
  482. component: vobject.base.Component,
  483. ) -> Tuple[
  484. vobject.icalendar.RecurringComponent,
  485. List[vobject.icalendar.RecurringComponent]
  486. ]:
  487. vevent_recurrence = None
  488. vevents_overridden = []
  489. for vevent in component.vevent_list:
  490. if hasattr(vevent, 'recurrence_id'):
  491. vevents_overridden += [vevent]
  492. elif vevent_recurrence:
  493. raise ValueError(
  494. f"component with UID {vevent.uid} "
  495. f"has more than one vevent with recurrence information"
  496. )
  497. else:
  498. vevent_recurrence = vevent
  499. if vevent_recurrence:
  500. return (
  501. vevent_recurrence, sorted(
  502. vevents_overridden,
  503. key=lambda vevent: vevent.recurrence_id.value
  504. )
  505. )
  506. else:
  507. raise ValueError(
  508. f"component with UID {vevent.uid} "
  509. f"does not have a vevent without a recurrence_id"
  510. )
  511. def _find_overridden(
  512. start: int,
  513. vevents: List[vobject.icalendar.RecurringComponent],
  514. dt: datetime.datetime,
  515. dt_format: str
  516. ) -> Tuple[int, Optional[vobject.icalendar.RecurringComponent]]:
  517. for i in range(start, len(vevents)):
  518. dt_event = datetime.datetime.strptime(
  519. vevents[i].recurrence_id.value,
  520. dt_format
  521. ).replace(tzinfo=datetime.timezone.utc)
  522. if dt_event == dt:
  523. return (i + 1, vevents[i])
  524. return (start, None)
  525. def xml_item_response(base_prefix: str, href: str,
  526. found_props: Sequence[ET.Element] = (),
  527. not_found_props: Sequence[ET.Element] = (),
  528. found_item: bool = True) -> ET.Element:
  529. response = ET.Element(xmlutils.make_clark("D:response"))
  530. href_element = ET.Element(xmlutils.make_clark("D:href"))
  531. href_element.text = xmlutils.make_href(base_prefix, href)
  532. response.append(href_element)
  533. if found_item:
  534. for code, props in ((200, found_props), (404, not_found_props)):
  535. if props:
  536. propstat = ET.Element(xmlutils.make_clark("D:propstat"))
  537. status = ET.Element(xmlutils.make_clark("D:status"))
  538. status.text = xmlutils.make_response(code)
  539. prop_element = ET.Element(xmlutils.make_clark("D:prop"))
  540. for prop in props:
  541. prop_element.append(prop)
  542. propstat.append(prop_element)
  543. propstat.append(status)
  544. response.append(propstat)
  545. else:
  546. status = ET.Element(xmlutils.make_clark("D:status"))
  547. status.text = xmlutils.make_response(404)
  548. response.append(status)
  549. return response
def retrieve_items(
        base_prefix: str, path: str, collection: storage.BaseCollection,
        hreferences: Iterable[str], filters: Sequence[ET.Element],
        multistatus: ET.Element) -> Iterator[Tuple[radicale_item.Item, bool]]:
    """Retrieves all items that are referenced in ``hreferences`` from
    ``collection`` and adds 404 responses for missing and invalid items
    to ``multistatus``.

    Yields ``(item, filters_matched)`` tuples: ``False`` for items fetched
    by name (caller must still filter them), and whatever
    ``collection.get_filtered`` reports for a whole-collection request.
    """
    collection_requested = False

    def get_names() -> Iterator[str]:
        """Extracts all names from references in ``hreferences`` and adds
        404 responses for invalid references to ``multistatus``.
        If the whole collections is referenced ``collection_requested``
        gets set to ``True``."""
        nonlocal collection_requested
        for hreference in hreferences:
            try:
                name = pathutils.name_from_path(hreference, collection)
            except ValueError as e:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r: %s", hreference, path, e)
                response = xml_item_response(base_prefix, hreference,
                                             found_item=False)
                multistatus.append(response)
                continue
            if name:
                # Reference is an item
                yield name
            else:
                # Reference is a collection
                collection_requested = True

    # ``get_names()`` is consumed lazily by ``get_multi``; by the time the
    # loop below finishes, the generator has run to completion, so the
    # ``collection_requested`` check afterwards sees its final value.
    for name, item in collection.get_multi(get_names()):
        if not item:
            uri = pathutils.unstrip_path(posixpath.join(collection.path, name))
            response = xml_item_response(base_prefix, uri, found_item=False)
            multistatus.append(response)
        else:
            # Named fetches bypass storage-side filtering, hence ``False``.
            yield item, False
    if collection_requested:
        yield from collection.get_filtered(filters)
def test_filter(collection_tag: str, item: radicale_item.Item,
                filter_: ET.Element) -> bool:
    """Match an item against a filter.

    Raises ``ValueError`` for malformed or unsupported filters.
    """
    # NOTE(review): ``"C:%s" % filter_`` interpolates the Element's repr, so
    # make_clark() of that string can never equal ``filter_.tag`` and the
    # second condition is effectively always true.  Presumably the intent was
    # to compare against the literal "C:filter" tag — confirm upstream before
    # changing, since "fixing" it would alter which branch handles a request.
    if (collection_tag == "VCALENDAR" and
            filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
        # An empty filter matches everything.
        if len(filter_) == 0:
            return True
        # rfc4791-9.7: a CalDAV filter wraps exactly one comp-filter.
        if len(filter_) > 1:
            raise ValueError("Filter with %d children" % len(filter_))
        if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
            raise ValueError("Unexpected %r in filter" % filter_[0].tag)
        return radicale_filter.comp_match(item, filter_[0])
    # Same repr-interpolation caveat as above, for CardDAV filters.
    if (collection_tag == "VADDRESSBOOK" and
            filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
        for child in filter_:
            if child.tag != xmlutils.make_clark("CR:prop-filter"):
                raise ValueError("Unexpected %r in filter" % child.tag)
        # rfc6352-10.5: test="anyof" (the default) ORs the prop-filters,
        # test="allof" ANDs them.
        test = filter_.get("test", "anyof")
        if test == "anyof":
            return any(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        if test == "allof":
            return all(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        raise ValueError("Unsupported filter test: %r" % test)
    raise ValueError("Unsupported filter %r for %r" %
                     (filter_.tag, collection_tag))
class ApplicationPartReport(ApplicationBase):
    """REPORT request handling, mixed into the main application class."""

    def do_REPORT(self, environ: types.WSGIEnviron, base_prefix: str,
                  path: str, user: str) -> types.WSGIResponse:
        """Manage REPORT request.

        Checks access, parses the XML body, locates the target
        collection and dispatches to ``free_busy_report`` or
        ``xml_report``.
        """
        access = Access(self._rights, user, path)
        if not access.check("r"):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning("Bad REPORT request on %r: %s", path, e,
                           exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # ExitStack so the storage lock can be released early: the report
        # functions receive ``lock_stack.close`` and call it once they are
        # done with storage, while the ``with`` still guarantees release on
        # every other exit path.
        with contextlib.ExitStack() as lock_stack:
            lock_stack.enter_context(self._storage.acquire_lock("r", user))
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if not access.check("r", item):
                return httputils.NOT_ALLOWED
            # REPORT always operates on a collection; resolve an item path
            # to its parent collection.
            if isinstance(item, storage.BaseCollection):
                collection = item
            else:
                assert item.collection is not None
                collection = item.collection
            if xml_content is not None and \
                    xml_content.tag == xmlutils.make_clark("C:free-busy-query"):
                max_occurrence = self.configuration.get("reporting", "max_freebusy_occurrence")
                try:
                    status, body = free_busy_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close, max_occurrence)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                # Free-busy responses are iCalendar text, not XML.
                headers = {"Content-Type": "text/calendar; charset=%s" % self._encoding}
                return status, headers, str(body)
            else:
                try:
                    status, xml_answer = xml_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
                return status, headers, self._xml_response(xml_answer)