report.py 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769
  1. # This file is part of Radicale - CalDAV and CardDAV server
  2. # Copyright © 2008 Nicolas Kandel
  3. # Copyright © 2008 Pascal Halter
  4. # Copyright © 2008-2017 Guillaume Ayoub
  5. # Copyright © 2017-2021 Unrud <unrud@outlook.com>
  6. # Copyright © 2024-2024 Pieter Hijma <pieterhijma@users.noreply.github.com>
  7. # Copyright © 2024-2024 Ray <ray@react0r.com>
  8. # Copyright © 2024-2024 Georgiy <metallerok@gmail.com>
  9. # Copyright © 2024-2025 Peter Bieringer <pb@bieringer.de>
  10. #
  11. # This library is free software: you can redistribute it and/or modify
  12. # it under the terms of the GNU General Public License as published by
  13. # the Free Software Foundation, either version 3 of the License, or
  14. # (at your option) any later version.
  15. #
  16. # This library is distributed in the hope that it will be useful,
  17. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  19. # GNU General Public License for more details.
  20. #
  21. # You should have received a copy of the GNU General Public License
  22. # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
  23. import contextlib
  24. import copy
  25. import datetime
  26. import posixpath
  27. import socket
  28. import xml.etree.ElementTree as ET
  29. from http import client
  30. from typing import (Callable, Iterable, Iterator, List, Optional, Sequence,
  31. Tuple, Union)
  32. from urllib.parse import unquote, urlparse
  33. import vobject
  34. import vobject.base
  35. from vobject.base import ContentLine
  36. import radicale.item as radicale_item
  37. from radicale import httputils, pathutils, storage, types, xmlutils
  38. from radicale.app.base import Access, ApplicationBase
  39. from radicale.item import filter as radicale_filter
  40. from radicale.log import logger
  41. def free_busy_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
  42. collection: storage.BaseCollection, encoding: str,
  43. unlock_storage_fn: Callable[[], None],
  44. max_occurrence: int
  45. ) -> Tuple[int, Union[ET.Element, str]]:
  46. # NOTE: this function returns both an Element and a string because
  47. # free-busy reports are an edge-case on the return type according
  48. # to the spec.
  49. multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
  50. if xml_request is None:
  51. return client.MULTI_STATUS, multistatus
  52. root = xml_request
  53. if (root.tag == xmlutils.make_clark("C:free-busy-query") and
  54. collection.tag != "VCALENDAR"):
  55. logger.warning("Invalid REPORT method %r on %r requested",
  56. xmlutils.make_human_tag(root.tag), path)
  57. return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
  58. time_range_element = root.find(xmlutils.make_clark("C:time-range"))
  59. assert isinstance(time_range_element, ET.Element)
  60. # Build a single filter from the free busy query for retrieval
  61. # TODO: filter for VFREEBUSY in additional to VEVENT but
  62. # test_filter doesn't support that yet.
  63. vevent_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  64. attrib={'name': 'VEVENT'})
  65. vevent_cf_element.append(time_range_element)
  66. vcalendar_cf_element = ET.Element(xmlutils.make_clark("C:comp-filter"),
  67. attrib={'name': 'VCALENDAR'})
  68. vcalendar_cf_element.append(vevent_cf_element)
  69. filter_element = ET.Element(xmlutils.make_clark("C:filter"))
  70. filter_element.append(vcalendar_cf_element)
  71. filters = (filter_element,)
  72. # First pull from storage
  73. retrieved_items = list(collection.get_filtered(filters))
  74. # !!! Don't access storage after this !!!
  75. unlock_storage_fn()
  76. cal = vobject.iCalendar()
  77. collection_tag = collection.tag
  78. while retrieved_items:
  79. # Second filtering before evaluating occurrences.
  80. # ``item.vobject_item`` might be accessed during filtering.
  81. # Don't keep reference to ``item``, because VObject requires a lot of
  82. # memory.
  83. item, filter_matched = retrieved_items.pop(0)
  84. if not filter_matched:
  85. try:
  86. if not test_filter(collection_tag, item, filter_element):
  87. continue
  88. except ValueError as e:
  89. raise ValueError("Failed to free-busy filter item %r from %r: %s" %
  90. (item.href, collection.path, e)) from e
  91. except Exception as e:
  92. raise RuntimeError("Failed to free-busy filter item %r from %r: %s" %
  93. (item.href, collection.path, e)) from e
  94. fbtype = None
  95. if item.component_name == 'VEVENT':
  96. transp = getattr(item.vobject_item.vevent, 'transp', None)
  97. if transp and transp.value != 'OPAQUE':
  98. continue
  99. status = getattr(item.vobject_item.vevent, 'status', None)
  100. if not status or status.value == 'CONFIRMED':
  101. fbtype = 'BUSY'
  102. elif status.value == 'CANCELLED':
  103. fbtype = 'FREE'
  104. elif status.value == 'TENTATIVE':
  105. fbtype = 'BUSY-TENTATIVE'
  106. else:
  107. # Could do fbtype = status.value for x-name, I prefer this
  108. fbtype = 'BUSY'
  109. # TODO: coalesce overlapping periods
  110. if max_occurrence > 0:
  111. n_occurrences = max_occurrence+1
  112. else:
  113. n_occurrences = 0
  114. occurrences = radicale_filter.time_range_fill(item.vobject_item,
  115. time_range_element,
  116. "VEVENT",
  117. n=n_occurrences)
  118. if len(occurrences) >= max_occurrence:
  119. raise ValueError("FREEBUSY occurrences limit of {} hit"
  120. .format(max_occurrence))
  121. for occurrence in occurrences:
  122. vfb = cal.add('vfreebusy')
  123. vfb.add('dtstamp').value = item.vobject_item.vevent.dtstamp.value
  124. vfb.add('dtstart').value, vfb.add('dtend').value = occurrence
  125. if fbtype:
  126. vfb.add('fbtype').value = fbtype
  127. return (client.OK, cal.serialize())
def xml_report(base_prefix: str, path: str, xml_request: Optional[ET.Element],
               collection: storage.BaseCollection, encoding: str,
               unlock_storage_fn: Callable[[], None]
               ) -> Tuple[int, ET.Element]:
    """Read and answer REPORT requests that return XML.

    Read rfc3253-3.6 for info.

    Handles calendar-multiget, addressbook-multiget, sync-collection and
    the query reports; returns an HTTP status and a ``D:multistatus``
    element (or a webdav-error element on failure).
    """
    multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
    if xml_request is None:
        # An empty request body yields an empty multistatus
        return client.MULTI_STATUS, multistatus
    root = xml_request
    if root.tag in (xmlutils.make_clark("D:principal-search-property-set"),
                    xmlutils.make_clark("D:principal-property-search"),
                    xmlutils.make_clark("D:expand-property")):
        # We don't support searching for principals or indirect retrieving of
        # properties, just return an empty result.
        # InfCloud asks for expand-property reports (even if we don't announce
        # support for them) and stops working if an error code is returned.
        logger.warning("Unsupported REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.MULTI_STATUS, multistatus
    # Reject report types that don't match the collection's content type
    if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
            collection.tag != "VCALENDAR" or
            root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
            collection.tag != "VADDRESSBOOK" or
            root.tag == xmlutils.make_clark("D:sync-collection") and
            collection.tag not in ("VADDRESSBOOK", "VCALENDAR")):
        logger.warning("Invalid REPORT method %r on %r requested",
                       xmlutils.make_human_tag(root.tag), path)
        return client.FORBIDDEN, xmlutils.webdav_error("D:supported-report")
    props: Union[ET.Element, List]
    if root.find(xmlutils.make_clark("D:prop")) is not None:
        props = root.find(xmlutils.make_clark("D:prop"))  # type: ignore[assignment]
    else:
        props = []
    # Determine which hrefs the report addresses
    hreferences: Iterable[str]
    if root.tag in (
            xmlutils.make_clark("C:calendar-multiget"),
            xmlutils.make_clark("CR:addressbook-multiget")):
        # Read rfc4791-7.9 for info
        hreferences = set()
        for href_element in root.findall(xmlutils.make_clark("D:href")):
            temp_url_path = urlparse(href_element.text).path
            assert isinstance(temp_url_path, str)
            href_path = pathutils.sanitize_path(unquote(temp_url_path))
            if (href_path + "/").startswith(base_prefix + "/"):
                hreferences.add(href_path[len(base_prefix):])
            else:
                logger.warning("Skipping invalid path %r in REPORT request on "
                               "%r", href_path, path)
    elif root.tag == xmlutils.make_clark("D:sync-collection"):
        old_sync_token_element = root.find(
            xmlutils.make_clark("D:sync-token"))
        old_sync_token = ""
        if old_sync_token_element is not None and old_sync_token_element.text:
            old_sync_token = old_sync_token_element.text.strip()
        logger.debug("Client provided sync token: %r", old_sync_token)
        try:
            sync_token, names = collection.sync(old_sync_token)
        except ValueError as e:
            # Invalid sync token
            logger.warning("Client provided invalid sync token %r: %s",
                           old_sync_token, e, exc_info=True)
            # client.CONFLICT doesn't work with some clients (e.g. InfCloud)
            return (client.FORBIDDEN,
                    xmlutils.webdav_error("D:valid-sync-token"))
        hreferences = (pathutils.unstrip_path(
            posixpath.join(collection.path, n)) for n in names)
        # Append current sync token to response
        sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
        sync_token_element.text = sync_token
        multistatus.append(sync_token_element)
    else:
        # Query reports address the collection itself
        hreferences = (path,)
    filters = (
        root.findall(xmlutils.make_clark("C:filter")) +
        root.findall(xmlutils.make_clark("CR:filter")))
    expand = root.find(".//" + xmlutils.make_clark("C:expand"))
    # if we have expand prop we use "filter (except time range) -> expand -> filter (only time range)" approach
    time_range_element = None
    main_filters = []
    for filter_ in filters:
        # extract time-range filter for processing after main filters
        # for expand request
        # NOTE(review): when several filters carry a time-range, only the
        # last one is kept in ``time_range_element`` — confirm intent.
        time_range_element = filter_.find(".//" + xmlutils.make_clark("C:time-range"))
        if expand is None or time_range_element is None:
            main_filters.append(filter_)
    # Retrieve everything required for finishing the request.
    retrieved_items = list(retrieve_items(
        base_prefix, path, collection, hreferences, main_filters, multistatus))
    collection_tag = collection.tag
    # !!! Don't access storage after this !!!
    unlock_storage_fn()
    while retrieved_items:
        # ``item.vobject_item`` might be accessed during filtering.
        # Don't keep reference to ``item``, because VObject requires a lot of
        # memory.
        item, filters_matched = retrieved_items.pop(0)
        if filters and not filters_matched:
            try:
                if not all(test_filter(collection_tag, item, filter_)
                           for filter_ in main_filters):
                    continue
            except ValueError as e:
                raise ValueError("Failed to filter item %r from %r: %s" %
                                 (item.href, collection.path, e)) from e
            except Exception as e:
                raise RuntimeError("Failed to filter item %r from %r: %s" %
                                   (item.href, collection.path, e)) from e
        found_props = []
        not_found_props = []
        for prop in props:
            element = ET.Element(prop.tag)
            if prop.tag == xmlutils.make_clark("D:getetag"):
                element.text = item.etag
                found_props.append(element)
            elif prop.tag == xmlutils.make_clark("D:getcontenttype"):
                element.text = xmlutils.get_content_type(item, encoding)
                found_props.append(element)
            elif prop.tag in (
                    xmlutils.make_clark("C:calendar-data"),
                    xmlutils.make_clark("CR:address-data")):
                element.text = item.serialize()
                if (expand is not None) and item.component_name == 'VEVENT':
                    # Client asked for server-side recurrence expansion
                    starts = expand.get('start')
                    ends = expand.get('end')
                    if (starts is None) or (ends is None):
                        # C:expand requires both boundaries
                        return client.FORBIDDEN, \
                            xmlutils.webdav_error("C:expand")
                    start = datetime.datetime.strptime(
                        starts, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    end = datetime.datetime.strptime(
                        ends, '%Y%m%dT%H%M%SZ'
                    ).replace(tzinfo=datetime.timezone.utc)
                    time_range_start = None
                    time_range_end = None
                    if time_range_element is not None:
                        time_range_start, time_range_end = radicale_filter.parse_time_range(time_range_element)
                    # copy.copy: _expand mutates the item's vobject data
                    expanded_element = _expand(
                        element=element, item=copy.copy(item),
                        start=start, end=end,
                        time_range_start=time_range_start, time_range_end=time_range_end,
                    )
                    found_props.append(expanded_element)
                else:
                    found_props.append(element)
            else:
                not_found_props.append(element)
        assert item.href
        uri = pathutils.unstrip_path(
            posixpath.join(collection.path, item.href))
        multistatus.append(xml_item_response(
            base_prefix, uri, found_props=found_props,
            not_found_props=not_found_props, found_item=True))
    return client.MULTI_STATUS, multistatus
def _expand(
        element: ET.Element,
        item: radicale_item.Item,
        start: datetime.datetime,
        end: datetime.datetime,
        time_range_start: Optional[datetime.datetime] = None,
        time_range_end: Optional[datetime.datetime] = None,
) -> ET.Element:
    """Expand the recurring VEVENT in ``item`` over [``start``, ``end``] and
    store the serialized expansion in ``element.text``.

    ``start``/``end`` come from the client's C:expand attributes;
    ``time_range_start``/``time_range_end``, when given, additionally narrow
    the result to the report's C:time-range filter.  Returns ``element``
    (with empty text when no instance survives filtering).
    """
    vevent_component: vobject.base.Component = copy.copy(item.vobject_item)
    logger.info("Expanding event %s", item.href)
    # Split the vevents included in the component into one that contains the
    # recurrence information and others that contain a recurrence id to
    # override instances.
    vevent_recurrence, vevents_overridden = _split_overridden_vevents(vevent_component)
    dt_format = '%Y%m%dT%H%M%SZ'
    all_day_event = False
    if type(vevent_recurrence.dtstart.value) is datetime.date:
        # If an event comes to us with a dtstart specified as a date
        # then in the response we return the date, not datetime
        dt_format = '%Y%m%d'
        all_day_event = True
        # In case of dates, we need to remove timezone information since
        # rruleset.between computes with datetimes without timezone information
        start = start.replace(tzinfo=None)
        end = end.replace(tzinfo=None)
        if time_range_start is not None and time_range_end is not None:
            time_range_start = time_range_start.replace(tzinfo=None)
            time_range_end = time_range_end.replace(tzinfo=None)
    for vevent in vevents_overridden:
        _strip_single_event(vevent, dt_format)
    # Determine the event duration (DTEND takes precedence over DURATION)
    duration = None
    if hasattr(vevent_recurrence, "dtend"):
        duration = vevent_recurrence.dtend.value - vevent_recurrence.dtstart.value
    elif hasattr(vevent_recurrence, "duration"):
        try:
            duration = vevent_recurrence.duration.value
            if duration.total_seconds() <= 0:
                logger.warning("Invalid DURATION: %s", duration)
                duration = None
        except (AttributeError, TypeError) as e:
            logger.warning("Failed to parse DURATION: %s", e)
            duration = None
    # Generate EXDATE to remove from expansion range
    exdates_set: set[datetime.datetime] = set()
    if hasattr(vevent_recurrence, 'exdate'):
        exdates = vevent_recurrence.exdate.value
        if not isinstance(exdates, list):
            exdates = [exdates]
        # Normalize to the same form as the recurrence values below:
        # UTC datetimes for timed events, naive datetimes for dates
        exdates_set = {
            exdate.astimezone(datetime.timezone.utc) if isinstance(exdate, datetime.datetime)
            else datetime.datetime.fromordinal(exdate.toordinal()).replace(tzinfo=None)
            for exdate in exdates
        }
        logger.debug("EXDATE values: %s", exdates_set)
    rruleset = None
    if hasattr(vevent_recurrence, 'rrule'):
        rruleset = vevent_recurrence.getrruleset()
    filtered_vevents = []
    if rruleset:
        # This function uses datetimes internally without timezone info for dates
        # A vobject rruleset is for the event dtstart.
        # Expanded over a given time range this will not include
        # events which started before the time range but are still
        # ongoing at the start of the range
        # To accomodate this, reduce the start time by the duration of
        # the event. If this introduces an extra reccurence point then
        # that event should be included as it is still ongoing. If no
        # extra point is generated then it was a no-op.
        rstart = start - duration if duration and duration.total_seconds() > 0 else start
        recurrences = rruleset.between(rstart, end, inc=True)
        _strip_component(vevent_component)
        _strip_single_event(vevent_recurrence, dt_format)
        # ``vevents_overridden`` is sorted, so matched entries can be
        # skipped with a moving index instead of rescanning
        i_overridden = 0
        for recurrence_dt in recurrences:
            recurrence_utc = recurrence_dt if all_day_event else recurrence_dt.astimezone(datetime.timezone.utc)
            logger.debug("Processing recurrence: %s (all_day_event: %s)", recurrence_utc, all_day_event)
            # Apply time-range filter
            if time_range_start is not None and time_range_end is not None:
                dtstart = recurrence_utc
                dtend = dtstart + duration if duration else dtstart
                # Start includes the time, end does not
                if not (dtstart <= time_range_end and dtend > time_range_start):
                    logger.debug("Recurrence %s filtered out by time-range", recurrence_utc)
                    continue
            # Check exdate
            if recurrence_utc in exdates_set:
                logger.debug("Recurrence %s excluded by EXDATE", recurrence_utc)
                continue
            # Check for overridden instances
            i_overridden, vevent = _find_overridden(i_overridden, vevents_overridden, recurrence_utc, dt_format)
            if not vevent:
                # Create new instance from recurrence
                vevent = copy.deepcopy(vevent_recurrence)
                # For all day events, the system timezone may influence the
                # results, so use recurrence_dt
                recurrence_id = recurrence_dt if all_day_event else recurrence_utc
                logger.debug("Creating new VEVENT with RECURRENCE-ID: %s", recurrence_id)
                vevent.recurrence_id = ContentLine(
                    name='RECURRENCE-ID',
                    value=recurrence_id, params={}
                )
                _convert_to_utc(vevent, 'recurrence_id', dt_format)
                vevent.dtstart = ContentLine(
                    name='DTSTART',
                    value=recurrence_id.strftime(dt_format), params={}
                )
                # if there is a DTEND, override it. Duration does not need changing
                if hasattr(vevent, "dtend"):
                    vevent.dtend = ContentLine(
                        name='DTEND',
                        value=(recurrence_id + duration).strftime(dt_format), params={}
                    )
            filtered_vevents.append(vevent)
    # Filter overridden and recurrence base events
    if time_range_start is not None and time_range_end is not None:
        for vevent in vevents_overridden:
            dtstart = vevent.dtstart.value
            # Handle string values for DTSTART/DTEND
            # (``_strip_single_event`` serialized them with ``dt_format``)
            if isinstance(dtstart, str):
                try:
                    dtstart = datetime.datetime.strptime(dtstart, dt_format)
                    if all_day_event:
                        dtstart = dtstart.date()
                except ValueError as e:
                    logger.warning("Invalid DTSTART format: %s, error: %s", dtstart, e)
                    continue
            dtend = dtstart + duration if duration else dtstart
            logger.debug(
                "Filtering VEVENT with DTSTART: %s (type: %s), DTEND: %s (type: %s)",
                dtstart, type(dtstart), dtend, type(dtend))
            # Convert to datetime for comparison
            if all_day_event and isinstance(dtstart, datetime.date) and not isinstance(dtstart, datetime.datetime):
                dtstart = datetime.datetime.fromordinal(dtstart.toordinal()).replace(tzinfo=None)
                dtend = datetime.datetime.fromordinal(dtend.toordinal()).replace(tzinfo=None)
            elif not all_day_event and isinstance(dtstart, datetime.datetime) \
                    and isinstance(dtend, datetime.datetime):
                dtstart = dtstart.replace(tzinfo=datetime.timezone.utc)
                dtend = dtend.replace(tzinfo=datetime.timezone.utc)
            else:
                logger.warning("Unexpected DTSTART/DTEND type: dtstart=%s, dtend=%s", type(dtstart), type(dtend))
                continue
            if dtstart < time_range_end and dtend > time_range_start:
                if vevent not in filtered_vevents:  # Avoid duplicates
                    logger.debug("VEVENT passed time-range filter: %s", dtstart)
                    filtered_vevents.append(vevent)
            else:
                logger.debug("VEVENT filtered out: %s", dtstart)
    # Rebuild component
    if not filtered_vevents:
        element.text = ""
        return element
    else:
        vevent_component.vevent_list = filtered_vevents
        logger.debug("lbt: vevent_component %s", vevent_component)
        element.text = vevent_component.serialize()
        return element
  440. def _convert_timezone(vevent: vobject.icalendar.RecurringComponent,
  441. name_prop: str,
  442. name_content_line: str):
  443. prop = getattr(vevent, name_prop, None)
  444. if prop:
  445. if type(prop.value) is datetime.date:
  446. date_time = datetime.datetime.fromordinal(
  447. prop.value.toordinal()
  448. ).replace(tzinfo=datetime.timezone.utc)
  449. else:
  450. date_time = prop.value.astimezone(datetime.timezone.utc)
  451. setattr(vevent, name_prop, ContentLine(name=name_content_line, value=date_time, params=[]))
  452. def _convert_to_utc(vevent: vobject.icalendar.RecurringComponent,
  453. name_prop: str,
  454. dt_format: str):
  455. prop = getattr(vevent, name_prop, None)
  456. if prop:
  457. setattr(vevent, name_prop, ContentLine(name=prop.name, value=prop.value.strftime(dt_format), params=[]))
  458. def _strip_single_event(vevent: vobject.icalendar.RecurringComponent, dt_format: str) -> None:
  459. _convert_timezone(vevent, 'dtstart', 'DTSTART')
  460. _convert_timezone(vevent, 'dtend', 'DTEND')
  461. _convert_timezone(vevent, 'recurrence_id', 'RECURRENCE-ID')
  462. # There is something strange behaviour during serialization native datetime, so converting manually
  463. _convert_to_utc(vevent, 'dtstart', dt_format)
  464. _convert_to_utc(vevent, 'dtend', dt_format)
  465. _convert_to_utc(vevent, 'recurrence_id', dt_format)
  466. try:
  467. delattr(vevent, 'rrule')
  468. delattr(vevent, 'exdate')
  469. delattr(vevent, 'exrule')
  470. delattr(vevent, 'rdate')
  471. except AttributeError:
  472. pass
  473. def _strip_component(vevent: vobject.base.Component) -> None:
  474. timezones_to_remove = []
  475. for component in vevent.components():
  476. if component.name == 'VTIMEZONE':
  477. timezones_to_remove.append(component)
  478. for timezone in timezones_to_remove:
  479. vevent.remove(timezone)
  480. def _split_overridden_vevents(
  481. component: vobject.base.Component,
  482. ) -> Tuple[
  483. vobject.icalendar.RecurringComponent,
  484. List[vobject.icalendar.RecurringComponent]
  485. ]:
  486. vevent_recurrence = None
  487. vevents_overridden = []
  488. for vevent in component.vevent_list:
  489. if hasattr(vevent, 'recurrence_id'):
  490. vevents_overridden += [vevent]
  491. elif vevent_recurrence:
  492. raise ValueError(
  493. f"component with UID {vevent.uid} "
  494. f"has more than one vevent with recurrence information"
  495. )
  496. else:
  497. vevent_recurrence = vevent
  498. if vevent_recurrence:
  499. return (
  500. vevent_recurrence, sorted(
  501. vevents_overridden,
  502. key=lambda vevent: vevent.recurrence_id.value
  503. )
  504. )
  505. else:
  506. raise ValueError(
  507. f"component with UID {vevent.uid} "
  508. f"does not have a vevent without a recurrence_id"
  509. )
  510. def _find_overridden(
  511. start: int,
  512. vevents: List[vobject.icalendar.RecurringComponent],
  513. dt: datetime.datetime,
  514. dt_format: str
  515. ) -> Tuple[int, Optional[vobject.icalendar.RecurringComponent]]:
  516. for i in range(start, len(vevents)):
  517. dt_event = datetime.datetime.strptime(
  518. vevents[i].recurrence_id.value,
  519. dt_format
  520. ).replace(tzinfo=datetime.timezone.utc)
  521. if dt_event == dt:
  522. return (i + 1, vevents[i])
  523. return (start, None)
  524. def xml_item_response(base_prefix: str, href: str,
  525. found_props: Sequence[ET.Element] = (),
  526. not_found_props: Sequence[ET.Element] = (),
  527. found_item: bool = True) -> ET.Element:
  528. response = ET.Element(xmlutils.make_clark("D:response"))
  529. href_element = ET.Element(xmlutils.make_clark("D:href"))
  530. href_element.text = xmlutils.make_href(base_prefix, href)
  531. response.append(href_element)
  532. if found_item:
  533. for code, props in ((200, found_props), (404, not_found_props)):
  534. if props:
  535. propstat = ET.Element(xmlutils.make_clark("D:propstat"))
  536. status = ET.Element(xmlutils.make_clark("D:status"))
  537. status.text = xmlutils.make_response(code)
  538. prop_element = ET.Element(xmlutils.make_clark("D:prop"))
  539. for prop in props:
  540. prop_element.append(prop)
  541. propstat.append(prop_element)
  542. propstat.append(status)
  543. response.append(propstat)
  544. else:
  545. status = ET.Element(xmlutils.make_clark("D:status"))
  546. status.text = xmlutils.make_response(404)
  547. response.append(status)
  548. return response
  549. def retrieve_items(
  550. base_prefix: str, path: str, collection: storage.BaseCollection,
  551. hreferences: Iterable[str], filters: Sequence[ET.Element],
  552. multistatus: ET.Element) -> Iterator[Tuple[radicale_item.Item, bool]]:
  553. """Retrieves all items that are referenced in ``hreferences`` from
  554. ``collection`` and adds 404 responses for missing and invalid items
  555. to ``multistatus``."""
  556. collection_requested = False
  557. def get_names() -> Iterator[str]:
  558. """Extracts all names from references in ``hreferences`` and adds
  559. 404 responses for invalid references to ``multistatus``.
  560. If the whole collections is referenced ``collection_requested``
  561. gets set to ``True``."""
  562. nonlocal collection_requested
  563. for hreference in hreferences:
  564. try:
  565. name = pathutils.name_from_path(hreference, collection)
  566. except ValueError as e:
  567. logger.warning("Skipping invalid path %r in REPORT request on "
  568. "%r: %s", hreference, path, e)
  569. response = xml_item_response(base_prefix, hreference,
  570. found_item=False)
  571. multistatus.append(response)
  572. continue
  573. if name:
  574. # Reference is an item
  575. yield name
  576. else:
  577. # Reference is a collection
  578. collection_requested = True
  579. for name, item in collection.get_multi(get_names()):
  580. if not item:
  581. uri = pathutils.unstrip_path(posixpath.join(collection.path, name))
  582. response = xml_item_response(base_prefix, uri, found_item=False)
  583. multistatus.append(response)
  584. else:
  585. yield item, False
  586. if collection_requested:
  587. yield from collection.get_filtered(filters)
def test_filter(collection_tag: str, item: radicale_item.Item,
                filter_: ET.Element) -> bool:
    """Match an item against a filter.

    Returns True when ``item`` satisfies ``filter_``; raises ValueError for
    malformed or unsupported filters.
    """
    # NOTE(review): ``"C:%s" % filter_`` interpolates the Element's *repr*
    # into the clark name, so this inequality is effectively always true for
    # VCALENDAR collections — presumably the filter's tag name was meant;
    # confirm intent before changing.
    if (collection_tag == "VCALENDAR" and
            filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
        # An empty filter matches everything
        if len(filter_) == 0:
            return True
        if len(filter_) > 1:
            raise ValueError("Filter with %d children" % len(filter_))
        if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
            raise ValueError("Unexpected %r in filter" % filter_[0].tag)
        return radicale_filter.comp_match(item, filter_[0])
    if (collection_tag == "VADDRESSBOOK" and
            filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
        # Every child must be a prop-filter; the "test" attribute selects
        # any-of (default) or all-of semantics (RFC 6352 section 10.5.1)
        for child in filter_:
            if child.tag != xmlutils.make_clark("CR:prop-filter"):
                raise ValueError("Unexpected %r in filter" % child.tag)
        test = filter_.get("test", "anyof")
        if test == "anyof":
            return any(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        if test == "allof":
            return all(radicale_filter.prop_match(item.vobject_item, f, "CR")
                       for f in filter_)
        raise ValueError("Unsupported filter test: %r" % test)
    # Unknown filter / collection-type combination
    raise ValueError("Unsupported filter %r for %r" %
                     (filter_.tag, collection_tag))
class ApplicationPartReport(ApplicationBase):
    """Application mixin that handles the REPORT HTTP method."""

    def do_REPORT(self, environ: types.WSGIEnviron, base_prefix: str,
                  path: str, user: str) -> types.WSGIResponse:
        """Manage REPORT request.

        Checks access rights, parses the XML request body, locates the
        target collection and dispatches to ``free_busy_report`` or
        ``xml_report``.
        """
        access = Access(self._rights, user, path)
        if not access.check("r"):
            return httputils.NOT_ALLOWED
        try:
            xml_content = self._read_xml_request_body(environ)
        except RuntimeError as e:
            logger.warning("Bad REPORT request on %r: %s", path, e,
                           exc_info=True)
            return httputils.BAD_REQUEST
        except socket.timeout:
            logger.debug("Client timed out", exc_info=True)
            return httputils.REQUEST_TIMEOUT
        # ExitStack so the storage lock can be released early by the report
        # functions (they receive ``lock_stack.close`` as unlock callback)
        with contextlib.ExitStack() as lock_stack:
            lock_stack.enter_context(self._storage.acquire_lock("r", user))
            item = next(iter(self._storage.discover(path)), None)
            if not item:
                return httputils.NOT_FOUND
            if not access.check("r", item):
                return httputils.NOT_ALLOWED
            # Reports always operate on a collection; resolve the parent
            # collection when the path names a single item
            if isinstance(item, storage.BaseCollection):
                collection = item
            else:
                assert item.collection is not None
                collection = item.collection

            if xml_content is not None and \
                    xml_content.tag == xmlutils.make_clark("C:free-busy-query"):
                max_occurrence = self.configuration.get("reporting", "max_freebusy_occurrence")
                try:
                    status, body = free_busy_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close, max_occurrence)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                # Free-busy replies are iCalendar text, not XML
                headers = {"Content-Type": "text/calendar; charset=%s" % self._encoding}
                return status, headers, str(body)
            else:
                try:
                    status, xml_answer = xml_report(
                        base_prefix, path, xml_content, collection, self._encoding,
                        lock_stack.close)
                except ValueError as e:
                    logger.warning(
                        "Bad REPORT request on %r: %s", path, e, exc_info=True)
                    return httputils.BAD_REQUEST
                headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
                return status, headers, self._xml_response(xml_answer)