diff --git a/.github/workflows/linkcheck.yml b/.github/workflows/linkcheck.yml index d20f94f2..5d4adac1 100644 --- a/.github/workflows/linkcheck.yml +++ b/.github/workflows/linkcheck.yml @@ -12,7 +12,7 @@ jobs: with: fail: true args: >- - --timeout 10 - --max-retries 2 + --timeout 20 + --max-retries 3 '**/*.md' '**/*.rst' diff --git a/.gitignore b/.gitignore index b0c9a9ac..bbbc3491 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,5 @@ tests/docker-test-servers/baikal/baikal-backup/ tests/docker-test-servers/*/baikal-backup/ # But keep the pre-configured Specific directory for Baikal !tests/docker-test-servers/baikal/Specific/ +# Local test server configuration (may contain credentials) +tests/caldav_test_servers.yaml diff --git a/AI-POLICY.md b/AI-POLICY.md index 2c22a1f7..335f9a9e 100644 --- a/AI-POLICY.md +++ b/AI-POLICY.md @@ -1,28 +1,59 @@ # Policy on usage of Artifical Intelligence and other tools +## Read this first + +The most important rule: Inform about it! + +If you've spent hours, perhaps a full day of your time writing up a +pull request, then I sort of owe you something. I should spend some +of my time looking through the submission carefully, and if nothing +else, I owe it to you to be polite, respectful and guide you in the right +direction or give a good explanation for why I think your pull request +is pulling the project in the wrong direction. A human being has +feelings, I should be careful not to hurt your feelings. + +On the other hand, perhaps you've spent 30 seconds either doing `ruff +check --fix ; gh pr create` or telling Claude to check what went wrong +in the logs and submit a bugfix upstream. Do I still owe +it to you to spend time looking through the submission carefully and +spending time being polite and caring about your feelings? + +Perhaps your pull request is just one out of many such "drive-by pull +requests". It doesn't scale for a maintainer to spend lots of time on +each such pull request. 
I should just accept or decline such requests +rapidly with minimum effort. + +So it all boils down to this: Be honest about tool usage! + ## Background -From time to time I do get pull requests where the author has done -little else than running some tool on the code and submitting it as a -pull request. Those pull requests may have value to the project, but -it's dishonest to not be transparent about it; teaching me how to run -the tool and integrating it into the CI workflow may have a bigger -value than the changes provided by the tool. Recently I've also -started receiving pull requests with code changes generated by AI (and -I've seen people posting screenshots of simple questions and answers -from ChatGPT in forum discussions, without contributing anything else). +The "30 second effort pull request" mentioned above may have value to +the project, but it's dishonest to not be transparent about it. +Sometimes, teaching me how to run the tool and integrating it into the +CI workflow may have a bigger value than the changes provided by the +tool. Starting in 2025-11, I've spent quite some time testing Claude. I'm positively surprised, it's doing a much better job than what I had expected. The AI may do things a lot faster, smarter and better than a good coder. Sometimes. Other times it may spend a lot of "tokens" -and a long time coming up with sub-optimal solutions, or even -solutions that doesn't work at all. Perhaps at some time in the near -future the AI will do the developer profession completely obsoleted - -but as of 2026-02, my experiences is that the AI performs best when -being "supervised" and "guided" by a good coder knowing the project. +and a long time coming up with sub-optimal or really bad solutions. + +Perhaps at some time in the near future the AI will make the developer +profession completely obsolete - but as of 2026-02, my experience is +that the AI performs best when being "supervised" and "guided" by a +good coder knowing the project. 
+ +## Bugfixes are (most often) welcome + +Over the past month, playing with a "max" subscription with Claude, +I've made it into a rule that when I stumble upon some weird bug in +some software or libraries I'm using or dependent on, I always ask +Claude to analyze the bug, check the outstanding issues in the +project, either create a new issue or consider if there is anything of +value to add to an existing issue, and come up with a pull-request. Being a bit aware of the -## The rules +## General rules * Do **respect the maintainers time**. If/when the maintainer gets overwhelmed by pull requests of questionable quality or pull diff --git a/CHANGELOG.md b/CHANGELOG.md index b724028a..a9d7c142 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,10 +20,13 @@ This project should adhere to [Semantic Versioning](https://semver.org/spec/v2.0 Version 3.0 should be fully backward-compatible with version 2.x - but there are massive code changes in version 3.0: -* "Black style" has been replaced with ruff. This causes quite some changes in the code. -* Version 3.0 introduces **full async support** using a Sans-I/O architecture. The same domain objects (Calendar, Event, Todo, etc.) now work with both synchronous and asynchronous clients. The async client uses niquests by default; httpx is also supported for projects that already have it as a dependency. -* Quite some refactoring work has been done -* Some work has been put down ensuring better consistency in the method names. Version 3.0 should be backward-compatible with version 2.0, so the old methods still work, but are deprecated. +* **Full async support** using a Sans-I/O architecture. The same domain objects (Calendar, Event, Todo, etc.) now work with both synchronous and asynchronous clients. The async client uses niquests by default; httpx is also supported for projects that already have it as a dependency. 
+* **Sans-I/O architecture** -- internal refactoring separates protocol logic (XML building/parsing) from I/O into a layered architecture: protocol layer (`caldav/protocol/`), operations layer (`caldav/operations/`), and response handling (`caldav/response.py`). This enables code reuse between sync and async implementations and improves testability. +* **Lazy imports** -- `import caldav` is now significantly faster due to PEP 562 lazy loading. Heavy dependencies (lxml, niquests, icalendar) are deferred until first use. (PR #621) +* **API naming consistency** -- methods have been renamed for consistency. Server-fetching methods use `get_` prefix, capability checks use `supports_*()`. Old method names still work but are deprecated. +* **Ruff replaces Black** -- code formatting now uses ruff instead of Black, causing cosmetic changes throughout the codebase. +* **Expanded compatibility hints** -- server-specific workarounds added for Zimbra, Bedework, CCS (Apple CalendarServer), Davis, DAViCal, GMX, ecloud, Synology, Posteo, PurelyMail, and more. +* Quite some other refactoring work has been done. ### Breaking Changes @@ -31,6 +34,7 @@ Version 3.0 should be fully backward-compatible with version 2.x - but there are * **Minimum Python version**: Python 3.10+ is now required (was 3.8+). * **Test Server Configuration**: `tests/conf.py` has been removed and `conf_private.py` will be ignored. See the Test Framework section below. +* **Config file parse errors now raise exceptions**: `caldav.config.read_config()` now raises `ValueError` on YAML/JSON parse errors instead of logging and returning an empty dict. This ensures config errors are detected early. 
### Deprecated @@ -69,7 +73,7 @@ Additionally, direct `DAVClient()` instantiation should migrate to `get_davclien ### Added -* **Full async API** - New `AsyncDAVClient` and async-compatible domain objects: +* **Full async API** -- New `AsyncDAVClient` and async-compatible domain objects: ```python from caldav.async_davclient import get_davclient @@ -79,45 +83,99 @@ Additionally, direct `DAVClient()` instantiation should migrate to `get_davclien for cal in calendars: events = await cal.get_events() ``` -* **Sans-I/O architecture** - Internal refactoring separates protocol logic from I/O: - - Protocol layer (`caldav/protocol/`): Pure functions for XML building/parsing - - Operations layer (`caldav/operations/`): High-level CalDAV operations - - This enables code reuse between sync and async implementations +* **Sans-I/O architecture** -- Internal refactoring separates protocol logic from I/O: + - Protocol layer (`caldav/protocol/`): Pure functions for XML building/parsing with typed dataclasses (DAVRequest, DAVResponse, PropfindResult, CalendarQueryResult) + - Operations layer (`caldav/operations/`): Sans-I/O business logic for CalDAV operations (properties, search, calendar management, principal discovery) + - Response layer (`caldav/response.py`): Shared `BaseDAVResponse` for sync/async + - Data state (`caldav/datastate.py`): Strategy pattern for managing data representations (raw string, icalendar, vobject) -- avoids unnecessary parse/serialize cycles +* **Lazy imports (PEP 562)** -- `import caldav` is now fast. Heavy dependencies (lxml, niquests, icalendar) are deferred until first use. https://github.com/python-caldav/caldav/pull/621 +* **`DAVObject.name` deprecated** -- use `get_display_name()` instead. The old `.name` property now emits `DeprecationWarning`. * Added python-dateutil and PyYAML as explicit dependencies (were transitive) * Quite some methods have been renamed for consistency and to follow best current practices. See the deprecation section. 
* `Calendar` class now accepts a `name` parameter in its constructor, addressing a long-standing API inconsistency (https://github.com/python-caldav/caldav/issues/128) -* **Data representation API** - New efficient data access via `CalendarObjectResource` properties (https://github.com/python-caldav/caldav/issues/613): - - `.icalendar_instance` - parsed icalendar object (lazy loaded) - - `.vobject_instance` - parsed vobject object (lazy loaded) - - `.data` - raw iCalendar string +* **Data representation API** -- New efficient data access via `CalendarObjectResource` properties (https://github.com/python-caldav/caldav/issues/613): + - `.icalendar_instance` -- parsed icalendar object (lazy loaded) + - `.vobject_instance` -- parsed vobject object (lazy loaded) + - `.data` -- raw iCalendar string + - Context managers `edit_icalendar_instance()` and `edit_vobject_instance()` for safe mutable access + - `get_data()`, `get_icalendar_instance()`, `get_vobject_instance()` return copies for read-only access - Internal `DataState` class manages caching between formats -* **CalendarObjectResource.id property** - Returns the UID of calendar objects (https://github.com/python-caldav/caldav/issues/515) -* **calendar.searcher() API** - Factory method for advanced search queries (https://github.com/python-caldav/caldav/issues/590): +* **CalendarObjectResource.id property** -- Returns the UID of calendar objects (https://github.com/python-caldav/caldav/issues/515) +* **calendar.searcher() API** -- Factory method for advanced search queries (https://github.com/python-caldav/caldav/issues/590): ```python searcher = calendar.searcher() searcher.add_filter(...) results = searcher.search() ``` +* **`get_calendars()` and `get_calendar()` context managers** -- Module-level factory functions that create a client, fetch calendars, and clean up on exit: + ```python + with get_calendars(url="...", username="...", password="...") as calendars: + for cal in calendars: + ... 
+ ``` +* **Base+override feature profiles** -- YAML config now supports inheriting from base feature profiles: + ```yaml + my-server: + features: + base: nextcloud + search.comp-type: unsupported + ``` +* **Feature validation** -- `caldav.config` now validates feature configuration and raises errors on unknown feature names +* **URL space validation** -- `caldav.lib.url` now validates that URLs don't contain unquoted spaces +* **Fallback for missing calendar-home-set** -- Client falls back to principal URL when `calendar-home-set` property is not available +* **Load fallback for changed URLs** -- `CalendarObjectResource.load()` falls back to UID-based lookup when servers change URLs after save ### Fixed * RFC 4791 compliance: Don't send Depth header for calendar-multiget REPORT (clients SHOULD NOT send it, but servers MUST ignore it per §7.9) +* Fixed `ssl_verify_cert` not passed through in `get_sync_client` and `get_async_client` +* Fixed `_derive_from_subfeatures` partial-config derivation bug +* Fixed feature name parsing when names include `compatibility_hints.` prefix +* Fixed recursive `_search_with_comptypes` when `search.comp-type` is broken +* Fixed pending todo search on servers with broken comp-type filtering +* Fixed URL path quoting when extracting calendars from PROPFIND results +* Removed spurious warning on URL path mismatch, deduplicated `get_properties` +* Fixed `create-calendar` feature incorrectly derived as unsupported +* Fixed various async test issues (awaiting sync calls, missing feature checks, authorization error handling) +* Fixed `search.category` features to use correct `search.text.category` names ### Changed * Sync client (`DAVClient`) now shares common code with async client via `BaseDAVClient` * Response handling unified in `BaseDAVResponse` class +* Search refactored to use generator-based Sans-I/O pattern -- `_search_impl` yields `(SearchAction, data)` tuples consumed by sync or async wrappers * Test configuration migrated from 
legacy `tests/conf.py` to new `tests/test_servers/` framework +* Configuration system expanded: `get_connection_params()` provides unified config discovery with clear priority (explicit params > test server config > env vars > config file) +* `${VAR}` and `${VAR:-default}` environment variable expansion in config values +* Ruff replaces Black for code formatting +* `caldav/objects.py` backward-compatibility shim removed (imports go directly to submodules) ### Test Framework -* Fixed Nextcloud Docker test server tmpfs permissions race condition -* Added deptry for dependency verification in CI -* The test server framework has been refactored with a new `tests/test_servers/` module. It provides **YAML-based server configuration**: see `tests/test_servers/__init__.py` for usage +* **New `tests/test_servers/` module** -- Complete rewrite of test infrastructure: + - `TestServer` base class hierarchy (EmbeddedTestServer, DockerTestServer, ExternalTestServer) + - YAML-based server configuration (`tests/caldav_test_servers.yaml.example`) + - `ServerRegistry` for server discovery and lifecycle management + - `client_context()` and `has_test_servers()` helpers +* **New Docker test servers**: CCS (Apple CalendarServer), DAViCal, Davis, Zimbra +* **Updated Docker configs**: Baikal, Cyrus, Nextcloud, SOGo * Added pytest-asyncio for async test support +* Added deptry for dependency verification in CI +* Added lychee link-check workflow +* Added `convert_conf_private.py` migration tool for old config format +* Removed `tests/conf.py`, `tests/conf_baikal.py`, `tests/conf_private.py.EXAMPLE` +* **New test suites**: + - `test_async_davclient.py` (821 lines) -- Async client unit tests + - `test_async_integration.py` (466 lines) -- Async integration tests + - `test_operations_*.py` (6 files) -- Operations layer unit tests + - `test_protocol.py` (319 lines) -- Protocol layer unit tests + - `test_lazy_import.py` (141 lines) -- PEP 562 lazy import verification +* Fixed Nextcloud Docker 
test server tmpfs permissions race condition ### GitHub Pull Requests Merged +* #621 - Lazy-load heavy dependencies to speed up import caldav +* #622 - Fix overlong inline literal, replace hyphens with en-dashes * #607 - Add deptry for dependency verification ### GitHub Issues Closed @@ -130,7 +188,25 @@ Additionally, direct `DAVClient()` instantiation should migrate to `get_davclien ### Security -Nothing to report. +* UUID1 usage in UID generation (`calendarobject_ops.py`) may embed the host MAC address in calendar UIDs. Since calendar events are shared with third parties, this is a privacy concern. Planned fix: switch to UUID4. + +### Compatibility Hints Expanded + +Server-specific workarounds have been significantly expanded. Profiles added or updated for: + +* **Zimbra** -- search.is-not-defined, delete-calendar, recurrences.count, case-sensitive search +* **Bedework** -- save-load.journal, save-load.todo.recurrences.thisandfuture, search.recurrences.expanded.todo, search.time-range.alarm +* **CCS (Apple CalendarServer)** -- save-load.journal unsupported, various search hints +* **Davis** -- principal-search at parent level, mixed-calendar features +* **GMX** -- rate-limit, basepath correction +* **ecloud** -- create-calendar unsupported, search.is-not-defined, case-sensitive +* **Synology** -- is-not-defined, wipe-calendar cleanup +* **SOGo** -- save-load.journal ungraceful, case-insensitive, delete-calendar +* **Posteo** -- search.combined-is-logical-and unsupported +* **PurelyMail** -- search.time-range.todo ungraceful +* **DAViCal** -- various search and sync hints +* **Xandikos** -- freebusy-query now supported in v0.3.3 +* **Baikal/Radicale** -- case-sensitive search, principal-search features ## [2.2.6] - [2026-02-01] diff --git a/MANIFEST.in b/MANIFEST.in index 5758534d..1db18291 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,3 +2,5 @@ include COPYING.* include *.md recursive-include tests caldav exclude tests/conf_private.py +exclude 
tests/caldav_test_servers.yaml +exclude tests/tmp_caldav_test_servers.yaml diff --git a/RELEASE-HOWTO.md b/RELEASE-HOWTO.md index a5c5244f..37e34fe7 100644 --- a/RELEASE-HOWTO.md +++ b/RELEASE-HOWTO.md @@ -45,5 +45,5 @@ This is most likely not complete, but should explain some of the "silly" steps a * Forgetting to add new files to the git repo * Having checked out a branch or tag or something, and tagging that as the new release rather than the latest HEAD. * Forgetting to push to pypi, or pushing something else than the tagged revision to pypi -* Pushing out junk files in the pypi-release (i.e. .pyc-files, log files, temp files, `tests/conf_private.py`, etc +* Pushing out junk files in the pypi-release (i.e. .pyc-files, log files, temp files, `tests/conf_private.py`, `tests/caldav_test_servers.yaml`, etc * Not adding the release to the "github releases" (I don't care much about this feature, but apparently some people check there to find the latest release version) diff --git a/caldav/__init__.py b/caldav/__init__.py index 1de3e2bc..8167d3e0 100644 --- a/caldav/__init__.py +++ b/caldav/__init__.py @@ -1,4 +1,15 @@ #!/usr/bin/env python +""" +caldav — CalDAV client library for Python. + +Heavy dependencies (niquests, icalendar, lxml) are loaded lazily on first +use via PEP 562 module-level ``__getattr__``. This keeps ``import caldav`` +fast even on constrained hardware. +""" + +from __future__ import annotations + +import importlib import logging try: @@ -8,16 +19,10 @@ import warnings warnings.warn( - "You need to install the `build` package and do a `python -m build` to get caldav.__version__ set correctly" + "You need to install the `build` package and do a `python -m build` " + "to get caldav.__version__ set correctly" ) -from .davclient import DAVClient, get_calendar, get_calendars, get_davclient -## TODO: this should go away in some future version of the library. 
-from .objects import * -from .search import CalDAVSearcher - -## We should consider if the NullHandler-logic below is needed or not, and -## if there are better alternatives? # Silence notification of no default logging handler log = logging.getLogger("caldav") @@ -29,4 +34,65 @@ def emit(self, record) -> None: log.addHandler(NullHandler()) -__all__ = ["__version__", "DAVClient", "get_davclient", "get_calendars", "get_calendar"] +# --------------------------------------------------------------------------- +# Lazy import machinery (PEP 562) +# --------------------------------------------------------------------------- +# Maps public attribute names to the *caldav* submodule that provides them. +_LAZY_IMPORTS: dict[str, str] = { + # davclient + "DAVClient": "caldav.davclient", + "get_calendar": "caldav.davclient", + "get_calendars": "caldav.davclient", + "get_davclient": "caldav.davclient", + # base_client + "CalendarCollection": "caldav.base_client", + "CalendarResult": "caldav.base_client", + # collection + "Calendar": "caldav.collection", + "CalendarSet": "caldav.collection", + "Principal": "caldav.collection", + "ScheduleMailbox": "caldav.collection", + "ScheduleInbox": "caldav.collection", + "ScheduleOutbox": "caldav.collection", + "SynchronizableCalendarObjectCollection": "caldav.collection", + # davobject + "DAVObject": "caldav.davobject", + # calendarobjectresource + "CalendarObjectResource": "caldav.calendarobjectresource", + "Event": "caldav.calendarobjectresource", + "Todo": "caldav.calendarobjectresource", + "Journal": "caldav.calendarobjectresource", + "FreeBusy": "caldav.calendarobjectresource", + # search + "CalDAVSearcher": "caldav.search", +} + +# Submodules accessible as attributes (e.g. ``caldav.error``). 
+_LAZY_SUBMODULES: set[str] = {"error"} + +__all__ = [ + "__version__", + *_LAZY_IMPORTS, +] + + +def __getattr__(name: str) -> object: + if name in _LAZY_IMPORTS: + module = importlib.import_module(_LAZY_IMPORTS[name]) + attr = getattr(module, name) + # Cache on the module so __getattr__ is not called again. + globals()[name] = attr + return attr + + if name in _LAZY_SUBMODULES: + module = importlib.import_module(f"caldav.lib.{name}") + globals()[name] = module + return module + + raise AttributeError(f"module 'caldav' has no attribute {name!r}") + + +def __dir__() -> list[str]: + # Expose lazy names alongside the eagerly-defined ones. + eager = list(globals()) + return sorted(set(eager + list(_LAZY_IMPORTS) + list(_LAZY_SUBMODULES))) diff --git a/caldav/async_davclient.py b/caldav/async_davclient.py index 42ab6af8..2f706caf 100644 --- a/caldav/async_davclient.py +++ b/caldav/async_davclient.py @@ -6,6 +6,7 @@ For sync usage, see the davclient.py wrapper. """ +import logging import sys from collections.abc import Mapping from types import TracebackType @@ -59,7 +60,6 @@ from caldav.lib import error from caldav.lib.python_utilities import to_normal_str, to_wire from caldav.lib.url import URL -from caldav.objects import log from caldav.protocol.types import ( CalendarQueryResult, PropfindResult, @@ -78,6 +78,8 @@ from caldav.requests import HTTPBearerAuth from caldav.response import BaseDAVResponse +log = logging.getLogger("caldav") + if sys.version_info < (3, 11): from typing_extensions import Self else: @@ -163,10 +165,9 @@ def __init__( """ headers = headers or {} - if isinstance(features, str): - import caldav.compatibility_hints + from caldav.config import resolve_features - features = getattr(caldav.compatibility_hints, features) + features = resolve_features(features) if isinstance(features, FeatureSet): self.features = features else: @@ -936,7 +937,7 @@ async def get_calendars(self, principal: Optional["Principal"] = None) -> list[" principal = await 
client.get_principal() calendars = await client.get_calendars(principal) for cal in calendars: - print(f"Calendar: {cal.name}") + print(f"Calendar: {cal.get_display_name()}") """ from caldav.collection import Calendar from caldav.operations.calendarset_ops import ( diff --git a/caldav/base_client.py b/caldav/base_client.py index 494fc2b4..a20a9a4b 100644 --- a/caldav/base_client.py +++ b/caldav/base_client.py @@ -141,6 +141,123 @@ def build_auth_object(self, auth_types: list[str] | None = None) -> None: pass +class CalendarCollection(list): + """ + A list of calendars that can be used as a context manager. + + This class extends list to provide automatic cleanup of the underlying + DAV client connection when used with a `with` statement. + + Example:: + + from caldav import get_calendars + + # As context manager (recommended) - auto-closes connection + with get_calendars(url="...", username="...", password="...") as calendars: + for cal in calendars: + print(cal.get_display_name()) + + # Without context manager - must close manually + calendars = get_calendars(url="...", username="...", password="...") + # ... use calendars ... + if calendars: + calendars[0].client.close() + """ + + def __init__(self, calendars: list | None = None, client: Any = None): + super().__init__(calendars or []) + self._client = client + + @property + def client(self): + """The underlying DAV client, if available.""" + if self._client: + return self._client + # Fall back to getting client from first calendar + if self: + return self[0].client + return None + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + return False + + def close(self): + """Close the underlying DAV client connection.""" + if self._client: + self._client.close() + elif self: + self[0].client.close() + + +class CalendarResult: + """ + A single calendar result that can be used as a context manager. 
+ + This wrapper holds a single Calendar (or None) and provides automatic + cleanup of the underlying DAV client connection when used with a + `with` statement. + + Example:: + + from caldav import get_calendar + + # As context manager (recommended) - auto-closes connection + with get_calendar(calendar_name="Work", url="...") as calendar: + if calendar: + events = calendar.date_search(start=..., end=...) + + # Without context manager + result = get_calendar(calendar_name="Work", url="...") + calendar = result.calendar # or just use result directly + # ... use calendar ... + result.close() + """ + + def __init__(self, calendar: Any = None, client: Any = None): + self._calendar = calendar + self._client = client + + @property + def calendar(self): + """The calendar, or None if not found.""" + return self._calendar + + @property + def client(self): + """The underlying DAV client.""" + if self._client: + return self._client + if self._calendar: + return self._calendar.client + return None + + def __enter__(self): + return self._calendar + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + return False + + def close(self): + """Close the underlying DAV client connection.""" + client = self.client + if client: + client.close() + + # Allow using the result directly as if it were the calendar + def __bool__(self): + return self._calendar is not None + + def __getattr__(self, name): + if self._calendar is None: + raise AttributeError(f"No calendar found, cannot access '{name}'") + return getattr(self._calendar, name) + + def _normalize_to_list(obj: Any) -> list: """Convert a string or None to a list for uniform handling.""" if not obj: @@ -162,7 +279,7 @@ def get_calendars( name: str | None = None, raise_errors: bool = False, **config_data, -) -> list[Calendar]: +) -> CalendarCollection: """ Get calendars from a CalDAV server with configuration from multiple sources. @@ -170,6 +287,9 @@ def get_calendars( calendar objects based on the specified criteria. 
Configuration is read from various sources (explicit parameters, environment variables, config files). + The returned CalendarCollection can be used as a context manager to ensure + the underlying connection is properly closed. + Args: client_class: The client class to use (DAVClient or AsyncDAVClient). calendar_url: URL(s) or ID(s) of specific calendars to fetch. @@ -187,22 +307,20 @@ def get_calendars( **config_data: Connection parameters (url, username, password, etc.) Returns: - List of Calendar objects matching the criteria. + CalendarCollection of Calendar objects matching the criteria. If no calendar_url or calendar_name specified, returns all calendars. Example:: - from caldav import DAVClient - from caldav.base_client import get_calendars - - # Get all calendars - calendars = get_calendars(DAVClient, url="https://...", username="...", password="...") + from caldav import get_calendars - # Get specific calendars by name - calendars = get_calendars(DAVClient, calendar_name=["Work", "Personal"], ...) + # As context manager (recommended) + with get_calendars(url="https://...", username="...", password="...") as calendars: + for cal in calendars: + print(cal.get_display_name()) - # Get specific calendar by URL or ID - calendars = get_calendars(DAVClient, calendar_url="/calendars/user/work/", ...) 
+ # Without context manager - connection closed on garbage collection + calendars = get_calendars(url="https://...", username="...", password="...") """ import logging @@ -236,12 +354,12 @@ def _try(meth, kwargs, errmsg): if client is None: if raise_errors: raise ValueError("Could not create DAV client - no configuration found") - return [] + return CalendarCollection() # Get principal principal = _try(client.principal, {}, "getting principal") if not principal: - return [] + return CalendarCollection(client=client) calendars = [] calendar_urls = _normalize_to_list(calendar_url) @@ -274,7 +392,7 @@ def _try(meth, kwargs, errmsg): if all_cals: calendars = all_cals - return calendars + return CalendarCollection(calendars, client=client) def get_davclient( diff --git a/caldav/calendarobjectresource.py b/caldav/calendarobjectresource.py index 030eff1c..7632f606 100644 --- a/caldav/calendarobjectresource.py +++ b/caldav/calendarobjectresource.py @@ -704,6 +704,28 @@ def load(self, only_if_unloaded: bool = False) -> Self: raise error.NotFoundError(errmsg(r)) self.data = r.raw # type: ignore except error.NotFoundError: + # Only attempt fallbacks if the object was previously loaded + # (has a UID), indicating the server may have changed the URL. + # Without a UID, the 404 is definitive. 
+ uid = self.id + if uid: + # Fallback 1: try multiget (REPORT may work even when GET fails) + try: + return self.load_by_multiget() + except Exception: + pass + # Fallback 2: re-fetch by UID (server may have changed the URL) + if self.parent and hasattr(self.parent, "get_object_by_uid"): + try: + obj = self.parent.get_object_by_uid(uid) + if obj: + self.url = obj.url + self.data = obj.data + if hasattr(obj, "props"): + self.props.update(obj.props) + return self + except error.NotFoundError: + pass raise except Exception: return self.load_by_multiget() @@ -730,6 +752,19 @@ async def _async_load(self, only_if_unloaded: bool = False) -> Self: raise error.NotFoundError(errmsg(r)) self.data = r.raw # type: ignore except error.NotFoundError: + # Fallback: re-fetch by UID (server may have changed the URL) + uid = self.id + if uid and self.parent and hasattr(self.parent, "get_object_by_uid"): + try: + obj = await self.parent.get_object_by_uid(uid) + if obj: + self.url = obj.url + self.data = obj.data + if hasattr(obj, "props"): + self.props.update(obj.props) + return self + except error.NotFoundError: + pass raise except Exception: # Note: load_by_multiget is sync-only, not supported in async mode yet @@ -862,7 +897,9 @@ def _generate_url(self): ## See https://github.com/python-caldav/caldav/issues/143 for the rationale behind double-quoting slashes ## TODO: should try to wrap my head around issues that arises when id contains weird characters. maybe it's ## better to generate a new uuid here, particularly if id is in some unexpected format. 
- return self.parent.url.join(quote(self.id.replace("/", "%2F")) + ".ics") + url = self.parent.url.join(quote(self.id.replace("/", "%2F")) + ".ics") + assert " " not in str(url) + return url def change_attendee_status(self, attendee: Any | None = None, **kwargs) -> None: """ @@ -950,11 +987,7 @@ def save( """ # Early return if there's no data (no-op case) - if ( - self._vobject_instance is None - and self._data is None - and self._icalendar_instance is None - ): + if not self.is_loaded(): return self # Helper function to get the full object by UID diff --git a/caldav/collection.py b/caldav/collection.py index 1b48a7d7..9e859db0 100644 --- a/caldav/collection.py +++ b/caldav/collection.py @@ -754,7 +754,7 @@ def delete(self): ## TODO: remove quirk handling from the functional tests ## TODO: this needs test code quirk_info = self.client.features.is_supported("delete-calendar", dict) - wipe = quirk_info["support"] in ("unsupported", "fragile") + wipe = not self.client.features.is_supported("delete-calendar") if quirk_info["support"] == "fragile": ## Do some retries on deleting the calendar for x in range(0, 20): @@ -776,42 +776,31 @@ def delete(self): super().delete() async def _async_calendar_delete(self): - """Async implementation of Calendar.delete(). + """Async implementation of Calendar.delete().""" + import asyncio - Note: Server quirk handling (fragile/wipe modes) is simplified for async. - Most modern servers support proper calendar deletion. 
- """ quirk_info = self.client.features.is_supported("delete-calendar", dict) + wipe = not self.client.features.is_supported("delete-calendar") - # For fragile servers, try simple delete first if quirk_info["support"] == "fragile": - for _ in range(0, 5): + # Do some retries on deleting the calendar + for _ in range(0, 20): try: await self._async_delete() - return except error.DeleteError: - import asyncio - + pass + try: + await self.search(event=True) await asyncio.sleep(0.3) - # If still failing after retries, fall through to wipe - - if quirk_info["support"] in ("unsupported", "fragile"): - # Need to delete all objects first - # Use the async client's get_events method - try: - events = await self.client.get_events(self) - for event in events: - await event._async_delete() - except Exception: - pass # Best effort - try: - todos = await self.client.get_todos(self) - for todo in todos: - await todo._async_delete() - except Exception: - pass # Best effort + except error.NotFoundError: + wipe = False + break - await self._async_delete() + if wipe: + for obj in await self.search(): + await obj._async_delete() + else: + await self._async_delete() def get_supported_components(self) -> list[Any]: """ @@ -1036,9 +1025,11 @@ def multiget(self, event_urls: Iterable[URL], raise_notfound: bool = False) -> I """ results = self._multiget(event_urls, raise_notfound=raise_notfound) for url, data in results: + # Quote path to handle servers returning unencoded spaces (e.g., Zimbra) + quoted_url = quote(unquote(str(url)), safe="/:@") yield self._calendar_comp_class_by_data(data)( self.client, - url=self.url.join(url), + url=self.url.join(quoted_url), data=data, parent=self, ) diff --git a/caldav/compatibility_hints.py b/caldav/compatibility_hints.py index 59e15c02..a8c10b55 100644 --- a/caldav/compatibility_hints.py +++ b/caldav/compatibility_hints.py @@ -8,6 +8,18 @@ "server implementation details" and "feature database logic" in three separate files. 
""" import copy +import warnings + +# Valid support levels for features +VALID_SUPPORT_LEVELS = frozenset({ + "full", # Feature works as expected + "unsupported", # Feature not available (may be silently ignored) + "fragile", # Sometimes works, sometimes not + "quirk", # Supported but needs special handling + "broken", # Server does unexpected things + "ungraceful", # Server throws errors (actually most graceful for error handling) + "unknown", # Not yet tested/determined +}) ## NEW STYLE ## (we're gradually moving stuff from the good old @@ -77,6 +89,7 @@ class FeatureSet: "description": "Deleting a calendar does not delete the objects, or perhaps create/delete of calendars does not work at all. For each test run, every calendar resource object should be deleted for every test run", }, "create-calendar": { + "default": { "support": "full" }, "description": "RFC4791 says that \"support for MKCALENDAR on the server is only RECOMMENDED and not REQUIRED because some calendar stores only support one calendar per user (or principal), and those are typically pre-created for each account\". Hence a conformant server may opt to not support creating calendars, this is often seen for cloud services (some services allows extra calendars to be made, but not through the CalDAV protocol). (RFC4791 also says that the server MAY support MKCOL in section 8.5.2. I do read it as MKCOL may be used for creating calendars - which is weird, since section 8.5.2 is titled \"external attachments\". 
We should consider testing this as well)", }, "create-calendar.auto": { @@ -102,12 +115,18 @@ class FeatureSet: }, "save-load.event": {"description": "it's possible to save and load events to the calendar"}, "save-load.event.recurrences": {"description": "it's possible to save and load recurring events to the calendar - events with an RRULE property set, including recurrence sets"}, - "save-load.event.recurrences.count": {"description": "The server will receive and store a recurring event with a count set in the RRULE"}, + "save-load.event.recurrences.count": {"description": "The server will receive and store a recurring event with a count set in the RRULE", "default": {"support": "full"}}, "save-load.todo": {"description": "it's possible to save and load tasks to the calendar"}, "save-load.todo.recurrences": {"description": "it's possible to save and load recurring tasks to the calendar"}, - "save-load.todo.recurrences.count": {"description": "The server will receive and store a recurring task with a count set in the RRULE"}, - "save-load.todo.mixed-calendar": {"description": "The same calendar may contain both events and tasks (Zimbra only allows tasks to be placed on special task lists)"}, + "save-load.todo.recurrences.count": {"description": "The server will receive and store a recurring task with a count set in the RRULE", "default": {"support": "full"}}, + "save-load.todo.recurrences.thisandfuture": {"description": "Completing a recurring task with rrule_mode='thisandfuture' works (modifies RRULE and saves back to server)", "default": {"support": "full"}}, + "save-load.todo.mixed-calendar": {"description": "The same calendar may contain both events and tasks (Zimbra only allows tasks to be placed on special task lists)", "default": {"support": "full"}}, "save-load.journal": {"description": "The server will even accept journals"}, + ## TODO: zimbra cannot mix events and tasks, but then davis surprised me by not allowing journals on the same calendar. 
But this may be a miss in the checking script - it may be that mixing is allowed, but that the calendar has to be set up from scratch with explicit support for both VJOURNAL and other things + "save-load.journal.mixed-calendar": {"description": "The same calendar may contain events, tasks and journals (some servers require journals on a dedicated VJOURNAL calendar)", "default": {"support": "full"}}, + "save-load.get-by-url": { + "description": "GET requests to calendar object resource URLs work correctly. When unsupported, the server returns 404 on GET even for valid object URLs. The client works around this by falling back to UID-based lookup.", + }, "save-load.reuse-deleted-uid": { "description": "After deleting an event, the server allows creating a new event with the same UID. When 'broken', the server keeps deleted events in a trashbin with a soft-delete flag, causing unique constraint violations on UID reuse. See https://github.com/nextcloud/server/issues/30096" }, @@ -121,6 +140,7 @@ class FeatureSet: "description": "In all the search examples in the RFC, comptype is given during a search, the client specifies if it's event or tasks or journals that is wanted. However, as I read the RFC this is not required. If omitted, the server should deliver all objects. Many servers will not return anything if the COMPTYPE filter is not set. Other servers will return 404" }, "search.comp-type": { + "type": "server-peculiarity", "description": "Server correctly filters calendar-query results by component type. When 'broken', server may misclassify component types (e.g., returning TODOs when VEVENTs are requested). The library will perform client-side filtering to work around this issue", "default": {"support": "full"} }, @@ -132,8 +152,10 @@ class FeatureSet: "description": "Time-range searches should only return events/todos that actually fall within the requested time range. 
Some servers incorrectly return recurring events whose recurrences fall outside (after) the search interval, or events with no recurrences in the requested time range at all. RFC4791 section 9.9 specifies that a VEVENT component overlaps a time range if the condition (start < search_end AND end > search_start) is true.", "links": ["https://datatracker.ietf.org/doc/html/rfc4791#section-9.9"], }, - "search.time-range.todo": {"description": "basic time range searches for tasks works"}, - "search.time-range.event": {"description": "basic time range searches for event works"}, + "search.time-range.todo": {"description": "basic time range searches for tasks works", "default": {"support": "full"}}, + "search.time-range.todo.old-dates": {"description": "time range searches for tasks with old dates (e.g. year 2000) work - some servers enforce a min-date-time restriction"}, + "search.time-range.event": {"description": "basic time range searches for event works", "default": {"support": "full"}}, + "search.time-range.event.old-dates": {"description": "time range searches for events with old dates (e.g. year 2000) work - some servers enforce a min-date-time restriction"}, "search.time-range.journal": {"description": "basic time range searches for journal works"}, "search.time-range.alarm": {"description": "Time range searches for alarms work. 
The server supports searching for events based on when their alarms trigger, as specified in RFC4791 section 9.9"}, "search.is-not-defined": { @@ -170,7 +192,8 @@ class FeatureSet: "description": "tasks can also be recurring" }, "search.recurrences.includes-implicit.todo.pending": { - "description": "a future recurrence of a pending task should always be pending and appear in searches for pending tasks" + "description": "a future recurrence of a pending task should always be pending and appear in searches for pending tasks", + "default": {"support": "full"}, }, "search.recurrences.includes-implicit.event": { "description": "support for events" @@ -305,7 +328,16 @@ def copyFeatureSet(self, feature_set, collapse=True): if feature == 'old_flags': self._old_flags = feature_set[feature] continue - feature_info = self.find_feature(feature) + try: + feature_info = self.find_feature(feature) + except (AssertionError, KeyError): + warnings.warn( + f"Unknown feature '{feature}' in configuration. " + "This might be a typo. Check caldav/compatibility_hints.py for valid features.", + UserWarning, + stacklevel=3, + ) + feature_info = {} value = feature_set[feature] if feature not in self._server_features: self._server_features[feature] = {} @@ -313,14 +345,27 @@ def copyFeatureSet(self, feature_set, collapse=True): if isinstance(value, bool): server_node['support'] = "full" if value else "unsupported" elif isinstance(value, str) and 'support' not in server_node: + self._validate_support_level(value, feature) server_node['support'] = value elif isinstance(value, dict): + if 'support' in value: + self._validate_support_level(value['support'], feature) server_node.update(value) else: assert False if collapse: self.collapse() + def _validate_support_level(self, level, feature_name): + """Validate that a support level is valid, warn if not.""" + if level not in VALID_SUPPORT_LEVELS: + warnings.warn( + f"Feature '{feature_name}' has invalid support level '{level}'. 
" + f"Valid levels: {', '.join(sorted(VALID_SUPPORT_LEVELS))}", + UserWarning, + stacklevel=4, + ) + def _collapse_key(self, feature_dict): """ Extract the key part of a feature dictionary for comparison during collapse. @@ -414,6 +459,12 @@ def is_supported(self, feature, return_type=bool, return_defaults=True, accept_f while True: if feature_ in self._server_features: return self._convert_node(self._server_features[feature_], feature_info, return_type, accept_fragile) + # Try deriving status from subfeatures at this level + current_info = feature_info if feature_ == feature else self.find_feature(feature_) + if 'default' not in current_info: + derived = self._derive_from_subfeatures(feature_, current_info, return_type, accept_fragile) + if derived is not None: + return derived if '.' not in feature_: if not return_defaults: return None @@ -425,37 +476,43 @@ def is_supported(self, feature, return_type=bool, return_defaults=True, accept_f return self._convert_node(self._default(feature_info), feature_info, return_type, accept_fragile) feature_ = feature_[:feature_.rfind('.')] + _POSITIVE_STATUSES = frozenset({'full', 'quirk'}) + def _derive_from_subfeatures(self, feature, feature_info, return_type, accept_fragile=False): """ Derive parent feature status from explicitly set subfeatures. Logic: - Only consider subfeatures WITHOUT explicit defaults (those are independent features) - - If all relevant subfeatures have the same status → use that status - - If subfeatures have mixed statuses → return "unknown" - (since we can't definitively determine the parent's status) - - Returns None if no relevant subfeatures are explicitly set. 
+ - If ANY relevant subfeature has a positive status (full/quirk) → derive as that status + (any support means the parent has some support) + - If ALL relevant subfeatures are set AND all have the same negative status → use that status + - If only a PARTIAL set of subfeatures is configured with all negative statuses → + return None (incomplete information, fall through to default) + - Mixed statuses (some positive, some negative) → "unknown" + + Returns None if no relevant subfeatures are explicitly set or if + derivation is inconclusive due to partial information. """ if 'subfeatures' not in feature_info or not feature_info['subfeatures']: return None - # Collect statuses from explicitly set subfeatures (excluding independent ones) + # Count relevant subfeatures (those without explicit defaults) and collect statuses + total_relevant = 0 subfeature_statuses = [] for sub in feature_info['subfeatures']: subfeature_key = f"{feature}.{sub}" - if subfeature_key in self._server_features: - # Skip subfeatures with explicit defaults - they represent independent behaviors - # not hierarchical components of the parent feature - try: - subfeature_info = self.find_feature(subfeature_key) - if 'default' in subfeature_info: - # This subfeature has an explicit default, meaning it's independent - continue - except: - # If we can't find the feature info, include it conservatively - pass + # Skip subfeatures with explicit defaults - they represent independent behaviors + try: + subfeature_info = self.find_feature(subfeature_key) + if 'default' in subfeature_info: + continue + except: + pass + + total_relevant += 1 + if subfeature_key in self._server_features: sub_dict = self._server_features[subfeature_key] # Extract the support level (or enable/behaviour/observed) status = sub_dict.get('support', sub_dict.get('enable', sub_dict.get('behaviour', sub_dict.get('observed')))) @@ -466,13 +523,26 @@ def _derive_from_subfeatures(self, feature, feature_info, return_type, accept_fr if not 
subfeature_statuses: return None - # Check if all subfeatures have the same status - if all(status == subfeature_statuses[0] for status in subfeature_statuses): - # All same - use that status + has_positive = any(s in self._POSITIVE_STATUSES for s in subfeature_statuses) + all_same = all(s == subfeature_statuses[0] for s in subfeature_statuses) + is_complete = len(subfeature_statuses) >= total_relevant + + if has_positive: + if all_same: + derived_status = subfeature_statuses[0] + else: + # Mixed positive/negative → unknown + derived_status = 'unknown' + elif is_complete and all_same: + # All relevant subfeatures set, all the same negative status derived_status = subfeature_statuses[0] - else: - # Mixed statuses - we don't have complete/consistent information + elif is_complete: + # All relevant subfeatures set, mixed non-positive statuses derived_status = 'unknown' + else: + # Partial set with only non-positive statuses → inconclusive, + # the unset siblings might have different (positive) status + return None # Create a node dict with the derived status derived_node = {'support': derived_status} @@ -776,7 +846,7 @@ def dotted_feature_set_list(self, compact=False): } xandikos_main = xandikos_v0_3.copy() -xandikos_main.pop('search.recurrences.expanded.todo') +xandikos_main.pop('freebusy-query.rfc4791') xandikos = xandikos_main @@ -784,13 +854,12 @@ def dotted_feature_set_list(self, compact=False): ## There is much development going on at Radicale as of summar 2025, ## so I'm expecting this list to shrink a lot soon. 
radicale = { - "search.text.case-sensitive": {"support": "unsupported"}, "search.is-not-defined": {"support": "fragile", "behaviour": "seems to work for categories but not for dtend"}, + "search.text.case-sensitive": {"support": "unsupported"}, "search.recurrences.includes-implicit.todo.pending": {"support": "fragile", "behaviour": "inconsistent results between runs"}, "search.recurrences.expanded.todo": {"support": "unsupported"}, "search.recurrences.expanded.exception": {"support": "unsupported"}, - 'principal-search': {'support': 'unknown', 'behaviour': 'No display name available - cannot test'}, - 'principal-search.list-all': {'support': 'unsupported'}, + "principal-search": {"support": "unsupported"}, ## this only applies for very simple installations "auto-connect.url": {"domain": "localhost", "scheme": "http", "basepath": "/"}, ## freebusy is not supported yet, but on the long-term road map @@ -814,7 +883,6 @@ def dotted_feature_set_list(self, compact=False): 'auto-connect.url': { 'basepath': '/remote.php/dav', }, - 'search.combined-is-logical-and': {'support': 'unsupported'}, 'search.comp-type-optional': {'support': 'ungraceful'}, 'search.recurrences.expanded.todo': {'support': 'unsupported'}, 'search.recurrences.expanded.exception': {'support': 'unsupported'}, ## TODO: verify @@ -827,13 +895,17 @@ def dotted_feature_set_list(self, compact=False): }, 'search.recurrences.includes-implicit.todo': {'support': 'unsupported'}, #'save-load.todo.mixed-calendar': {'support': 'unsupported'}, ## Why? It started complaining about this just recently. - 'principal-search.by-name': {'support': 'unsupported'}, - 'principal-search.list-all': {'support': 'ungraceful'}, + 'principal-search.by-name.self': {'support': 'unsupported'}, + 'principal-search': {'support': 'ungraceful'}, 'old_flags': ['unique_calendar_ids'], } ## TODO: Latest - mismatch between config and test script in delete-calendar.free-namespace ... and create-calendar.set-displayname? 
ecloud = nextcloud | { + ## Possibly a limitation on free accounts (extra calendars may require a paid plan) + 'create-calendar': {'support': 'unsupported'}, + 'search.is-not-defined': {'support': 'unsupported'}, + 'search.text.case-sensitive': {'support': 'unsupported'}, ## TODO: this applies only to test runs, not to ordinary usage 'rate-limit': { 'enable': True, @@ -851,21 +923,27 @@ def dotted_feature_set_list(self, compact=False): ## Zimbra is not very good at it's caldav support zimbra = { 'auto-connect.url': {'basepath': '/dav/'}, + 'delete-calendar': {'support': 'fragile', 'behaviour': 'may move to trashbin instead of deleting immediately'}, + ## save-load.get-by-url was unsupported in older Zimbra versions (GET to + ## valid calendar object URLs returned 404), but works in zimbra/zcs-foss:latest + ## Zimbra treats same-UID events across calendars as aliases of the same event + 'save.duplicate-uid.cross-calendar': {'support': 'unsupported'}, 'search.recurrences.expanded.exception': {'support': 'unsupported'}, ## TODO: verify 'create-calendar.set-displayname': {'support': 'unsupported'}, 'save-load.todo.mixed-calendar': {'support': 'unsupported'}, 'save-load.todo.recurrences.count': {'support': 'unsupported'}, ## This is a new problem? - 'save-load.journal': "ungraceful", - 'search.is-not-defined': {'support': 'unsupported'}, + 'save-load.journal': {'support': 'ungraceful'}, + 'sync-token': {'support': 'fragile'}, #'search.text': 'unsupported', ## weeeird ... 
it wasn't like this before + 'search.is-not-defined': {'support': 'unsupported'}, 'search.text.substring': {'support': 'unsupported'}, 'search.text.category': {'support': 'ungraceful'}, 'search.recurrences.expanded.todo': { "support": "unsupported" }, 'search.comp-type-optional': {'support': 'fragile'}, ## TODO: more research on this, looks like a bug in the checker, + 'search.text.case-sensitive': {'support': 'unsupported'}, + 'search.text.case-insensitive': {'support': 'unsupported'}, 'search.time-range.alarm': {'support': 'unsupported'}, - 'sync-token': {'support': 'ungraceful'}, - 'principal-search': "ungraceful", - 'save.duplicate-uid.cross-calendar': {'support': 'unsupported', "behaviour": "moved-instead-of-copied" }, + 'principal-search': "unsupported", "old_flags": [ ## apparently, zimbra has no journal support @@ -876,7 +954,7 @@ def dotted_feature_set_list(self, compact=False): ## earlier versions of Zimbra display-name could be changed, but ## then the calendar would not be available on the old URL ## anymore) - 'event_by_url_is_broken', + ## 'event_by_url_is_broken' removed - works in zimbra/zcs-foss:latest 'no_delete_event', 'vtodo_datesearch_notime_task_is_skipped', 'no_relships', @@ -895,6 +973,8 @@ def dotted_feature_set_list(self, compact=False): bedework = { 'search.comp-type': {'support': 'broken', 'behaviour': 'Server returns everything when searching for events and nothing when searching for todos'}, + 'search.comp-type-optional': {'support': 'ungraceful'}, + 'search.time-range.event': {'support': 'unsupported'}, #"search.combined-is-logical-and": { "support": "unsupported" }, ## TODO: play with this and see if it's needed 'search-cache': {'behaviour': 'delay', 'delay': 1.5}, @@ -906,12 +986,10 @@ def dotted_feature_set_list(self, compact=False): # Ephemeral Docker container: wipe objects (delete-calendar not supported) 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, 'auto-connect.url': {'basepath': '/ucaldav/'}, - "save-load.journal": { 
- "support": "ungraceful" - }, - "search.time-range.alarm": { - "support": "unsupported" - }, + 'save-load.journal': {'support': 'ungraceful'}, + 'save-load.todo.recurrences.thisandfuture': {'support': 'ungraceful'}, + ## search.time-range.alarm: not checked by the server tester + 'search.time-range.alarm': {'support': 'unsupported'}, ## Huh? Non-deterministic behaviour of the checking script? #"save.duplicate-uid.cross-calendar": { # "support": "unsupported", @@ -923,7 +1001,13 @@ def dotted_feature_set_list(self, compact=False): "search.time-range.todo": { "support": "unsupported" }, - "search.text": { + "search.text.case-sensitive": { + "support": "unsupported" + }, + "search.text.case-insensitive": { + "support": "unsupported" + }, + "search.text.category": { "support": "unsupported" }, "search.is-not-defined": { @@ -933,14 +1017,9 @@ def dotted_feature_set_list(self, compact=False): "support": "fragile", "behaviour": "sometimes the text search delivers everything, other times it doesn't deliver anything. When the text search delivers everything, then the post-filtering will save the day" }, - "search.time-range.accurate": { - "support": "unsupported" - }, - "search.recurrences.includes-implicit.todo": { - "support": "unsupported" - }, - "search.recurrences.includes-implicit.infinite-scope": { - "support": "unsupported" + "search.recurrences.includes-implicit": { + "support": "unsupported", + "behaviour": "cannot reliably test due to broken comp-type filtering" }, "sync-token": { "support": "fragile" @@ -952,10 +1031,9 @@ def dotted_feature_set_list(self, compact=False): "search.recurrences.expanded.event": { "support": "unsupported" }, - ## It doesn't support expanding events, but it supports exapnding tasks!? - ## Or maybe there is a problem in the checker script? 
- ## TODO: look into this - #"search.recurrences.expanded.todo": True, + "search.recurrences.expanded.todo": { + "support": "unsupported" + }, "principal-search": { "support": "ungraceful", } @@ -963,52 +1041,54 @@ def dotted_feature_set_list(self, compact=False): synology = { 'principal-search': False, - 'search.time-range.alarm': False, 'sync-token': 'fragile', 'delete-calendar': False, 'search.comp-type-optional': 'fragile', + 'search.is-not-defined': {'support': 'fragile', 'behaviour': 'works for CLASS but not for CATEGORIES'}, + 'search.text.case-sensitive': {'support': 'unsupported'}, + 'search.time-range.alarm': {'support': 'unsupported'}, "search.recurrences.expanded.exception": False, - 'old_flags': ['vtodo_datesearch_nodtstart_task_is_skipped'], + 'old_flags': ['vtodo_datesearch_nodtstart_task_is_skipped'], + 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, } baikal = { ## version 0.10.1 "http.multiplexing": "fragile", ## ref https://github.com/python-caldav/caldav/issues/564 - "save-load.journal": {'support': 'ungraceful'}, - #'search.comp-type-optional': {'support': 'ungraceful'}, ## Possibly this has been fixed? + 'search.comp-type-optional': {'support': 'ungraceful'}, 'search.recurrences.expanded.todo': {'support': 'unsupported'}, 'search.recurrences.expanded.exception': {'support': 'unsupported'}, 'search.recurrences.includes-implicit.todo': {'support': 'unsupported'}, - "search.combined-is-logical-and": {"support": "unsupported"}, - 'principal-search.by-name': {'support': 'unsupported'}, ## This is weird - I'm quite sure the tests were passing without this one some few days ago. - 'principal-search.list-all': {'support': 'ungraceful'}, ## This is weird - I'm quite sure the tests were passing without this one some few days ago. 
+ 'save-load.journal.mixed-calendar': {'support': 'unsupported'}, + 'principal-search': {'support': 'ungraceful'}, + 'principal-search.by-name.self': {'support': 'unsupported'}, + 'principal-search.list-all': {'support': 'ungraceful'}, #'sync-token.delete': {'support': 'unsupported'}, ## Perhaps on some older servers? 'old_flags': [ ## extra features not specified in RFC5545 "calendar_order", - "calendar_color" - ] + "calendar_color", + ], } ## TODO: testPrincipals, testWrongAuthType, testTodoDatesearch fails ## Some unknown version of baikal has this baikal_old = baikal | { 'create-calendar': {'support': 'quirk', 'behaviour': 'mkcol-required'}, 'create-calendar.auto': {'support': 'unsupported'}, ## this is the default, but the "quirk" from create-calendar overwrites it. Hm. - } cyrus = { "search.comp-type-optional": {"support": "ungraceful"}, "search.recurrences.expanded.exception": {"support": "unsupported"}, - 'search.time-range.alarm': {'support': 'unsupported'}, + "search.time-range.alarm": {"support": "ungraceful"}, 'principal-search': {'support': 'ungraceful'}, + # Cyrus enforces unique UIDs across all calendars for a user + "save.duplicate-uid.cross-calendar": {"support": "unsupported"}, # Ephemeral Docker container: wipe objects but keep calendar (avoids UID conflicts) "test-calendar": {"cleanup-regime": "wipe-calendar"}, 'delete-calendar': { 'support': 'fragile', 'behaviour': 'Deleting a recently created calendar fails'}, - 'save.duplicate-uid.cross-calendar': {'support': 'ungraceful'}, # Cyrus may not properly reject wrong passwords in some configurations - 'wrong-password-check': {'support': 'unsupported'}, 'old_flags': [] } @@ -1030,16 +1110,15 @@ def dotted_feature_set_list(self, compact=False): # lazy responses cause MultiplexingError when accessing status_code "http.multiplexing": { "support": "unsupported" }, "search.comp-type-optional": { "support": "fragile" }, - "search.recurrences.expanded.todo": { "support": "unsupported" }, 
"search.recurrences.expanded.exception": { "support": "unsupported" }, - 'search.time-range.alarm': {'support': 'unsupported'}, + "search.time-range.alarm": { "support": "unsupported" }, 'sync-token': {'support': 'fragile'}, 'principal-search': {'support': 'unsupported'}, 'principal-search.list-all': {'support': 'unsupported'}, "old_flags": [ #'no_journal', ## it threw a 500 internal server error! ## for old versions #'nofreebusy', ## for old versions - 'fragile_sync_tokens', ## no issue raised yet + ## 'fragile_sync_tokens' removed - covered by 'sync-token': {'support': 'fragile'} 'vtodo_datesearch_nodtstart_task_is_skipped', ## no issue raised yet 'date_todo_search_ignores_duration', 'calendar_color', @@ -1049,47 +1128,33 @@ def dotted_feature_set_list(self, compact=False): } sogo = { - "save-load.journal": { "support": "ungraceful" }, - 'freebusy-query.rfc4791': {'support': 'ungraceful'}, - "search.time-range.accurate": { - "support": "unsupported", - "description": "SOGo returns events/todos that fall outside the requested time range. For recurring events, it may return recurrences that start after the search interval ends, or events with no recurrences in the requested range at all." 
- }, - "search.time-range.alarm": { - "support": "unsupported" - }, - "search.time-range.event": { + "save-load.journal": {"support": "ungraceful"}, + "search.is-not-defined": {"support": "unsupported"}, + "search.text.case-sensitive": { "support": "unsupported" }, - "search.time-range.todo": { + "search.text.case-insensitive": { "support": "unsupported" }, - "search.text": { - "support": "unsupported" - }, - "search.text.by-uid": True, - "search.is-not-defined": { + "search.time-range.alarm": { "support": "unsupported" }, "search.comp-type-optional": { "support": "unsupported" }, - "search.recurrences.includes-implicit.todo": { - "support": "unsupported" - }, - ## TODO: do some research into this, I think this is a bug in the checker script - "search.recurrences.includes-implicit.todo.pending": { + ## includes-implicit.todo has been observed as both supported and unsupported + ## across different test runs. Other includes-implicit children are unsupported. + ## Marking the parent as fragile to avoid cascading derivation issues. + "search.recurrences.includes-implicit": { "support": "fragile" }, - "search.recurrences.includes-implicit.infinite-scope": { - "support": "unsupported" - }, "sync-token": { "support": "fragile" }, "search.recurrences.expanded": { "support": "unsupported" }, + "freebusy-query.rfc4791": {"support": "unsupported"}, "principal-search": { "support": "ungraceful", "behaviour": "Search by name failed: ReportError at '501 Not Implemented - \n\n

An error occurred during object publishing

did not find the specified REPORT

\n\n', reason no reason", @@ -1140,7 +1205,7 @@ def dotted_feature_set_list(self, compact=False): 'basepath': '/principals/', # TODO: this seems fishy }, "save-load.journal": { "support": "ungraceful" }, - "delete-calendar": { "support": "fragile" }, + "delete-calendar": { "support": "unsupported" }, "search.is-not-defined": { "support": "unsupported" }, "search.time-range.todo": { "support": "unsupported" }, "search.time-range.alarm": {'support': 'unsupported'}, @@ -1158,8 +1223,8 @@ def dotted_feature_set_list(self, compact=False): 'no_scheduling', 'no_supported_components_support', 'no_relships', - 'unique_calendar_ids', ], + 'test-calendar': {'cleanup-regime': 'wipe-calendar'}, "sync-token": False, } @@ -1170,15 +1235,24 @@ def dotted_feature_set_list(self, compact=False): 'basepath': '/', }, 'create-calendar': {'support': 'unsupported'}, - 'save-load.journal': { "support": "ungraceful" }, + 'save-load.journal': {'support': 'unsupported'}, ## TODO1: we should ignore cases where observations are unknown while configuration is known ## TODO2: there are more calendars available at the posteo account, so it should be possible to check this. "save.duplicate-uid.cross-calendar": { "support": "unknown" }, 'search.comp-type-optional': {'support': 'ungraceful'}, + 'search.text.case-sensitive': {'support': 'unsupported'}, + ## Text search precondition check returns unexpected results on posteo + ## (possibly stale data on non-deletable calendar), so substring support + ## cannot be reliably determined. + 'search.text.substring': {'support': 'unknown'}, + ## search.time-range.todo was previously unsupported on posteo but + ## is now observed as working for recent dates (as of 2026-02). + ## Old dates (year 2000) still don't work. 
+ 'search.time-range.todo.old-dates': {'support': 'unsupported'}, 'search.recurrences.expanded.todo': {'support': 'unsupported'}, 'search.recurrences.expanded.exception': {'support': 'unsupported'}, 'search.recurrences.includes-implicit.todo': {'support': 'unsupported'}, - "search.combined-is-logical-and": {"support": "unsupported"}, + 'search.combined-is-logical-and': {'support': 'unsupported'}, 'sync-token': {'support': 'ungraceful'}, 'principal-search': {'support': 'unsupported'}, 'old_flags': [ @@ -1201,13 +1275,79 @@ def dotted_feature_set_list(self, compact=False): # 'no_relships', ## mail.ru recreates the icalendar content, and strips everything it doesn't know anyhting about, including relationship info #] +## Davis uses sabre/dav (same backend as Baikal), so hints are similar. +## To be refined after running tests. +davis = { + "search.recurrences.expanded.todo": {"support": "unsupported"}, + "search.recurrences.expanded.exception": {"support": "unsupported"}, + "search.recurrences.includes-implicit.todo": {"support": "unsupported"}, + "principal-search.by-name.self": {"support": "unsupported"}, + "principal-search": {"support": "ungraceful"}, + "save-load.journal.mixed-calendar": {"support": "unsupported"}, + "search.comp-type-optional": {"support": "ungraceful"}, + "old_flags": [ + "calendar_order", + "calendar_color", + ], +} + +## Apple CalendarServer (CCS) - archived 2019, Python 2/Twisted. +## MKCALENDAR always creates VEVENT-only calendars; supported-calendar-component-set +## cannot be changed. The pre-provisioned "tasks" calendar supports VTODO only. +## VJOURNAL is not supported at all. 
+ccs = { + "save-load.journal": {"support": "unsupported"}, + "save-load.todo.mixed-calendar": {"support": "unsupported"}, + # CCS enforces unique UIDs across ALL calendars for a user + "save.duplicate-uid.cross-calendar": {"support": "unsupported"}, + # CCS rejects multi-instance VTODOs (thisandfuture recurring completion) + "save-load.todo.recurrences.thisandfuture": {"support": "unsupported"}, + "search.time-range.event": {"support": "full"}, + "search.time-range.event.old-dates": {"support": "ungraceful"}, + "search.time-range.todo": {"support": "full"}, + "search.time-range.todo.old-dates": {"support": "ungraceful"}, + "search.comp-type-optional": {"support": "ungraceful"}, + "search.text.case-sensitive": {"support": "unsupported"}, + "search.time-range.alarm": {"support": "unsupported"}, + "search.recurrences": {"support": "unsupported"}, + "principal-search": {"support": "unsupported"}, + # Ephemeral Docker container: wipe objects (avoids UID conflicts across calendars) + "test-calendar": {"cleanup-regime": "wipe-calendar"}, + "old_flags": [ + "propfind_allprop_failure", + ], +} + purelymail = { ## Purelymail claims that the search indexes are "lazily" populated, ## so search works some minutes after the event was created/edited. 'search-cache': {'behaviour': 'delay', 'delay': 160}, "create-calendar.auto": {"support": "full"}, + ## 409 Conflict with when PUTting to a URL not under an existing calendar + 'save-load.get-by-url': {'support': 'unknown'}, + 'save-load.todo': {'support': 'ungraceful'}, + 'search.comp-type-optional': {'support': 'ungraceful'}, + ## The search features below are unreliable on purelymail, likely due + ## to the 160s search-cache delay. Results flip between unsupported + ## and ungraceful across runs. Marked fragile so the checker skips them. + ## was: (default, i.e. 
full) - observed ungraceful 2026-02 + 'search.is-not-defined': {'support': 'fragile'}, 'search.time-range.alarm': {'support': 'unsupported'}, - 'principal-search': {'support': 'unsupported'}, + ## was: unsupported - observed ungraceful 2026-02 + 'search.time-range.event': {'support': 'fragile'}, + ## was: ungraceful - observed unsupported 2026-02 (for .old-dates) + 'search.time-range.todo': {'support': 'fragile'}, + 'search.text.case-sensitive': {'support': 'unsupported'}, + 'search.text.case-insensitive': {'support': 'unsupported'}, + ## was: (default, i.e. full) - observed unknown 2026-02 + ## Search-cache delay makes substring check unreliable + 'search.text.substring': {'support': 'unknown'}, + 'search.text.category': {'support': 'unsupported'}, + 'search.recurrences.expanded': {'support': 'unsupported'}, + 'search.recurrences.includes-implicit': {'support': 'unsupported'}, + 'principal-search': {'support': 'ungraceful'}, + 'principal-search.by-name.self': {'support': 'ungraceful'}, + 'principal-search.list-all': {'support': 'ungraceful'}, 'auto-connect.url': { 'basepath': '/webdav/', 'domain': 'purelymail.com', @@ -1227,16 +1367,19 @@ def dotted_feature_set_list(self, compact=False): 'auto-connect.url': { 'scheme': 'https', 'domain': 'caldav.gmx.net', - ## This won't work yet. 
I'm not able to connect with gmx at all now, - ## so unable to create a verified fix for it now - 'basepath': '/begenda/dav/{username}/calendar', ## TODO: foobar + 'basepath': '/begenda/dav/{username}/', + }, + 'rate-limit': { + 'enable': True, + 'interval': 3, + 'count': 1, }, - 'create-calendar': {'support': 'unsupported'}, 'search.comp-type-optional': {'support': 'fragile', 'description': 'unexpected results from date-search without comp-type - but only sometimes - TODO: research more'}, 'search.recurrences.expanded': {'support': 'unsupported'}, - 'search.time-range.alarm': {'support': 'unsupported'}, + 'search.text.case-sensitive': {'support': 'unsupported'}, 'sync-token': {'support': 'unsupported'}, - 'principal-search': {'support': 'unsupported'}, + 'principal-search': {'support': 'ungraceful'}, + 'principal-search.by-name.self': {'support': 'unsupported'}, 'freebusy-query.rfc4791': {'support': 'unsupported'}, "old_flags": [ "no_scheduling_mailbox", diff --git a/caldav/config.py b/caldav/config.py index 153d6be0..25d0819d 100644 --- a/caldav/config.py +++ b/caldav/config.py @@ -1,3 +1,4 @@ +import copy import json import logging import os @@ -103,26 +104,19 @@ def read_config(fn, interactive_error=False): try: with open(fn, "rb") as config_file: return yaml.load(config_file, yaml.Loader) - except yaml.scanner.ScannerError: - logging.error( - f"config file {fn} exists but is neither valid json nor yaml. Check the syntax." - ) + except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e: + # Re-raise YAML errors so they can be handled by caller + raise ValueError(f"config file {fn} is neither valid JSON nor YAML: {e}") from e except ImportError: - logging.error( - f"config file {fn} exists but is not valid json, and pyyaml is not installed." 
- ) + raise ValueError(f"config file {fn} is not valid JSON, and pyyaml is not installed") except FileNotFoundError: ## File not found - logging.info("no config file found") + logging.debug(f"config file {fn} not found") + return {} except ValueError: - if interactive_error: - logging.error( - "error in config file. Be aware that the interactive configuration will ignore and overwrite the current broken config file", - exc_info=True, - ) - else: - logging.error("error in config file. It will be ignored", exc_info=True) + # Re-raise ValueError so caller can handle config errors + raise return {} @@ -162,6 +156,39 @@ def replacer(match: re.Match) -> str: return value +def resolve_features(features): + """Resolve a features specification into a dict suitable for FeatureSet. + + Supports: + - None: returns None (backward compatibility mode) + - str: looks up a named profile from compatibility_hints + e.g. "synology" or "compatibility_hints.synology" + - dict with "base" key: loads a named profile and merges overrides + e.g. 
{"base": "synology", "search.is-not-defined": {"support": "fragile"}} + - dict without "base": used as-is + """ + import caldav.compatibility_hints + + if features is None: + return None + if isinstance(features, str): + feature_name = features + if feature_name.startswith("compatibility_hints."): + feature_name = feature_name[len("compatibility_hints.") :] + return getattr(caldav.compatibility_hints, feature_name) + if isinstance(features, dict) and "base" in features: + base_name = features["base"] + if isinstance(base_name, str): + if base_name.startswith("compatibility_hints."): + base_name = base_name[len("compatibility_hints.") :] + base_features = copy.deepcopy(getattr(caldav.compatibility_hints, base_name)) + for key, value in features.items(): + if key != "base": + base_features[key] = value + return base_features + return features + + # Valid connection parameter keys for DAVClient CONNKEYS = frozenset( [ @@ -232,7 +259,11 @@ def get_connection_params( if explicit_params: # Filter to valid connection keys conn_params = {k: v for k, v in explicit_params.items() if k in CONNKEYS} - if conn_params.get("url"): + if conn_params.get("url") or conn_params.get("features"): + # Return when URL is given, or when features are given (the + # client constructor resolves URL from auto-connect.url hints + # via _auto_url()). Don't fall through to env vars/config + # files when the caller explicitly provided connection info. 
return conn_params # Check for config file path from environment early (needed for test server config too) @@ -359,6 +390,41 @@ def _extract_conn_params_from_section(section_data: dict[str, Any]) -> dict[str, if key in CONNKEYS: conn_params[key] = expand_env_vars(value) elif k == "features" and section_data[k]: - conn_params["features"] = section_data[k] + conn_params["features"] = resolve_features(section_data[k]) return conn_params if conn_params.get("url") else None + + +def get_all_test_servers( + config_file: str | None = None, +) -> dict[str, dict[str, Any]]: + """ + Get all test servers from config file. + + Finds all sections with 'testing_allowed: true' and returns their + connection parameters. + + Args: + config_file: Optional explicit path to config file. + If None, searches default locations. + + Returns: + Dict mapping section names to connection parameter dicts. + Each dict contains: url, username, password, features, etc. + """ + cfg = read_config(config_file) + if not cfg: + return {} + + result: dict[str, dict[str, Any]] = {} + for section_name in cfg: + section_data = config_section(cfg, section_name) + if section_data.get("testing_allowed"): + conn_params = _extract_conn_params_from_section(section_data) + if conn_params: + # Also copy the raw section data for keys not in CONNKEYS + # (e.g., testing_allowed itself, or custom keys) + conn_params["_raw_config"] = section_data + result[section_name] = conn_params + + return result diff --git a/caldav/davclient.py b/caldav/davclient.py index f13ecb86..72fb9b07 100644 --- a/caldav/davclient.py +++ b/caldav/davclient.py @@ -8,6 +8,7 @@ For async code, use: from caldav import aio """ +import copy import logging import sys import warnings @@ -52,10 +53,11 @@ from caldav.lib import error from caldav.lib.python_utilities import to_normal_str, to_wire from caldav.lib.url import URL -from caldav.objects import log from caldav.requests import HTTPBearerAuth from caldav.response import BaseDAVResponse +log = 
logging.getLogger("caldav") + if sys.version_info < (3, 11): from typing_extensions import Self else: @@ -84,6 +86,8 @@ ## TODO: this is also declared in davclient.DAVClient.__init__(...) # Import CONNKEYS from config to avoid duplication +from caldav.config import resolve_features as _resolve_features + def _auto_url( url, @@ -258,8 +262,7 @@ def __init__( ## Deprecation TODO: give a warning, user should use get_davclient or auto_calendar instead. Probably. - if isinstance(features, str): - features = getattr(caldav.compatibility_hints, features) + features = _resolve_features(features) self.features = FeatureSet(features) self.huge_tree = huge_tree @@ -507,7 +510,7 @@ def get_calendars(self, principal: Principal | None = None) -> list[Calendar]: principal = client.get_principal() calendars = client.get_calendars(principal) for cal in calendars: - print(f"Calendar: {cal.name}") + print(f"Calendar: {cal.get_display_name()}") """ from caldav.operations.calendarset_ops import ( _extract_calendars_from_propfind_results as extract_calendars, @@ -519,7 +522,9 @@ def get_calendars(self, principal: Principal | None = None) -> list[Calendar]: # Get calendar-home-set from principal calendar_home_url = self._get_calendar_home_set(principal) if not calendar_home_url: - return [] + # Fall back to the principal URL as calendar home + # (some servers like GMX don't support calendar-home-set) + calendar_home_url = str(principal.url) # Make URL absolute if relative calendar_home_url = self._make_absolute_url(calendar_home_url) @@ -1042,29 +1047,34 @@ def get_calendars(**kwargs) -> list["Calendar"]: return _base_get_calendars(DAVClient, **kwargs) -def get_calendar(**kwargs) -> Optional["Calendar"]: +def get_calendar(**kwargs) -> "CalendarResult": """ Get a single calendar from a CalDAV server. This is a convenience function for the common case where only one - calendar is needed. It returns the first matching calendar or None. + calendar is needed. 
Returns a CalendarResult that can be used as a + context manager. Args: Same as :func:`get_calendars`. Returns: - A single Calendar object, or None if no calendars found. + CalendarResult wrapping a Calendar object (or None if not found). + Use as context manager to auto-close the connection. Example:: from caldav import get_calendar - calendar = get_calendar(calendar_name="Work", url="...", ...) - if calendar: - events = calendar.get_events() + with get_calendar(calendar_name="Work", url="...", ...) as calendar: + if calendar: + events = calendar.date_search(start=..., end=...) """ + from caldav.base_client import CalendarResult + calendars = _base_get_calendars(DAVClient, **kwargs) - return calendars[0] if calendars else None + calendar = calendars[0] if calendars else None + return CalendarResult(calendar, client=calendars.client) def get_davclient(**kwargs) -> Optional["DAVClient"]: diff --git a/caldav/davobject.py b/caldav/davobject.py index ae49f28d..2d4cf018 100644 --- a/caldav/davobject.py +++ b/caldav/davobject.py @@ -96,6 +96,7 @@ def __init__( self.url = None else: self.url = URL.objectify(url) + assert " " not in str(self.url) @property def canonical_url(self) -> str: @@ -319,6 +320,63 @@ async def _async_get_property( foo = await self._async_get_properties([prop], **passthrough) return foo.get(prop.tag, None) + def _resolve_properties(self, properties: dict) -> dict: + """Resolve the correct property dict from a PROPFIND response. + + Servers may return hrefs that don't exactly match the request-URI. + RFC 4918, Section 9.1: "Clients MUST be able to handle the case + where the href in the response does not match the request-URI." + + This method handles various known mismatches (trailing slashes, + double slashes, iCloud quirks) and returns the resolved property + dict, also updating self.props as a side effect. 
+ """ + from .collection import ( + Principal, + ) ## late import to avoid cyclic dependencies + + if self.url is None: + raise ValueError("Unexpected value None for self.url") + + path = unquote(self.url.path) + if path.endswith("/"): + exchange_path = path[:-1] + else: + exchange_path = path + "/" + + if path in properties: + rc = properties[path] + elif exchange_path in properties: + if not isinstance(self, Principal): + log.warning( + f"The path {path} was not found in the properties, but {exchange_path} was. " + "This may indicate a server bug or a trailing slash issue." + ) + rc = properties[exchange_path] + elif self.url in properties: + rc = properties[self.url] + elif "/principal/" in properties and path.endswith("/principal/"): + ## Workaround for a known iCloud bug - the properties key is + ## expected to be the same as the path, but iCloud returns /principal/ + rc = properties["/principal/"] + elif "//" in path and path.replace("//", "/") in properties: + ## Workaround for double slashes in path (issue #302) + rc = properties[path.replace("//", "/")] + elif len(properties) == 1: + ## Ref https://github.com/python-caldav/caldav/issues/191 + ## RFC 4918, Section 9.1: "Clients MUST be able to handle + ## the case where the href in the response does not match + ## the request-URI." Accept whatever the server returns. + rc = list(properties.values())[0] + else: + log.warning( + f"Path handling problem. 
Path expected: {path}, " + f"paths found: {list(properties.keys())}" + ) + error.assert_(False) + self.props.update(rc) + return rc + def get_properties( self, props: Sequence[BaseElement] | None = None, @@ -348,10 +406,6 @@ def get_properties( if self.is_async_client: return self._async_get_properties(props, depth, parse_response_xml, parse_props) - from .collection import ( - Principal, - ) ## late import to avoid cyclic dependencies - rc = None response = self._query_properties(props, depth) if not parse_response_xml: @@ -380,28 +434,7 @@ def get_properties( error.assert_(properties) - if self.url is None: - raise ValueError("Unexpected value None for self.url") - - path = unquote(self.url.path) - if path.endswith("/"): - exchange_path = path[:-1] - else: - exchange_path = path + "/" - - if path in properties: - rc = properties[path] - elif exchange_path in properties: - if not isinstance(self, Principal): - log.warning( - f"The path {path} was not found in the properties, but {exchange_path} was. " - "This may indicate a server bug or a trailing slash issue." 
- ) - rc = properties[exchange_path] - else: - error.assert_(False) - self.props.update(rc) - return rc + return self._resolve_properties(properties) async def _async_get_properties( self, @@ -411,10 +444,6 @@ async def _async_get_properties( parse_props: bool = True, ): """Async implementation of get_properties.""" - from .collection import ( - Principal, - ) ## late import to avoid cyclic dependencies - rc = None response = await self._async_query_properties(props, depth) if not parse_response_xml: @@ -443,28 +472,7 @@ async def _async_get_properties( error.assert_(properties) - if self.url is None: - raise ValueError("Unexpected value None for self.url") - - path = unquote(self.url.path) - if path.endswith("/"): - exchange_path = path[:-1] - else: - exchange_path = path + "/" - - if path in properties: - rc = properties[path] - elif exchange_path in properties: - if not isinstance(self, Principal): - log.warning( - f"The path {path} was not found in the properties, but {exchange_path} was. " - "This may indicate a server bug or a trailing slash issue." - ) - rc = properties[exchange_path] - else: - error.assert_(False) - self.props.update(rc) - return rc + return self._resolve_properties(properties) def set_properties(self, props: Any | None = None) -> Self: """ diff --git a/caldav/lib/url.py b/caldav/lib/url.py index 05f48465..c2b426e0 100644 --- a/caldav/lib/url.py +++ b/caldav/lib/url.py @@ -69,7 +69,8 @@ def __eq__(self, other: object) -> bool: return str(me) == str(other) def __hash__(self) -> int: - return hash(str(self)) + # Must use canonical form to match __eq__ behavior + return hash(str(self.canonical())) # TODO: better naming? 
Will return url if url is already a URL # object, else will instantiate a new URL object diff --git a/caldav/lib/vcal.py b/caldav/lib/vcal.py index 2feaf748..4f29666f 100644 --- a/caldav/lib/vcal.py +++ b/caldav/lib/vcal.py @@ -228,6 +228,9 @@ def create_ical(ical_fragment=None, objtype=None, language="en_DK", **props): elif prop.startswith("alarm_"): alarm[prop[6:]] = props[prop] else: + # Remove existing property to avoid duplicates when + # overriding values from an ical_fragment + component.pop(prop, None) component.add(prop, props[prop]) if alarm: add_alarm(my_instance, alarm) diff --git a/caldav/objects.py b/caldav/objects.py deleted file mode 100755 index baae6421..00000000 --- a/caldav/objects.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -""" -I got fed up with several thousand lines of code in one and the same file. - -This file is by now just a backward compatibility layer. - -Logic has been split out: - -* DAVObject base class -> davobject.py -* CalendarObjectResource base class -> calendarobjectresource.py -* Event/Todo/Journal/FreeBusy -> calendarobjectresource.py -* Everything else (mostly collection objects) -> collection.py -""" - -## For backward compatibility -from .calendarobjectresource import * -from .collection import * -from .davobject import * diff --git a/caldav/operations/calendarset_ops.py b/caldav/operations/calendarset_ops.py index 501a15e6..e7f6fe03 100644 --- a/caldav/operations/calendarset_ops.py +++ b/caldav/operations/calendarset_ops.py @@ -11,7 +11,7 @@ import logging from dataclasses import dataclass from typing import Any -from urllib.parse import quote +from urllib.parse import quote, unquote, urlparse, urlunparse log = logging.getLogger("caldav") @@ -183,6 +183,25 @@ def _find_calendar_by_id( return None +def _quote_url_path(url: str) -> str: + """ + Quote the path component of a URL to handle spaces and special characters. + + Some servers (e.g., Zimbra) return URLs with unencoded spaces in the path. 
+ This function ensures the path is properly percent-encoded. + + Args: + url: URL string that may contain unencoded characters in path + + Returns: + URL with properly encoded path + """ + parsed = urlparse(url) + # quote the path, but unquote first to avoid double-encoding + quoted_path = quote(unquote(parsed.path), safe="/@") + return urlunparse(parsed._replace(path=quoted_path)) + + def _extract_calendars_from_propfind_results( results: list[Any] | None, ) -> list[CalendarInfo]: @@ -206,8 +225,8 @@ def _extract_calendars_from_propfind_results( if not is_calendar_resource(result.properties): continue - # Extract calendar info - url = result.href + # Extract calendar info - quote URL path to handle spaces + url = _quote_url_path(result.href) name = result.properties.get("{DAV:}displayname") cal_id = _extract_calendar_id_from_url(url) diff --git a/caldav/operations/search_ops.py b/caldav/operations/search_ops.py index 14af04ae..00bdcf99 100644 --- a/caldav/operations/search_ops.py +++ b/caldav/operations/search_ops.py @@ -240,7 +240,15 @@ def _filter_search_results( result = [] for o in objects: if searcher.expand or post_filter: - filtered = searcher.check_component(o, expand_only=not post_filter) + try: + filtered = searcher.check_component(o, expand_only=not post_filter) + except ValueError: + ## Server returned data with invalid recurrence structure + ## (e.g. after compatibility hacks stripped DURATION). + ## Include the object unfiltered rather than crashing. 
+ filtered = [ + x for x in o.icalendar_instance.subcomponents if not isinstance(x, Timezone) + ] if not filtered: continue else: @@ -392,9 +400,10 @@ def _build_search_xml_query( raise error.ConsistencyError(f"unsupported comp class {comp_class} for search") # Special hack for bedework - no comp_filter, do client-side filtering + # Keep comp_class so the caller knows what type to filter for client-side + # and to prevent _search_with_comptypes from being triggered again if _hacks == "no_comp_filter": comp_filter = None - comp_class = None # Add property filters for property in searcher._property_operator: diff --git a/caldav/search.py b/caldav/search.py index daae2882..c75f7915 100644 --- a/caldav/search.py +++ b/caldav/search.py @@ -229,13 +229,14 @@ def _search_impl( ## Handle servers with broken component-type filtering (e.g., Bedework) comp_type_support = calendar.client.features.is_supported("search.comp-type", str) - if ( + no_comp_filter = ( (self.comp_class or self.todo or self.event or self.journal) and comp_type_support == "broken" - and not _hacks and post_filter is not False - ): - _hacks = "no_comp_filter" + ) + if no_comp_filter: + if not _hacks: + _hacks = "no_comp_filter" post_filter = True ## Setting default value for post_filter @@ -353,7 +354,8 @@ def _search_impl( clone.expand = False if ( - calendar.client.features.is_supported("search.text") + not no_comp_filter + and calendar.client.features.is_supported("search.text") and calendar.client.features.is_supported("search.combined-is-logical-and") and ( not calendar.client.features.is_supported( diff --git a/docs/design/V3_CODE_REVIEW.md b/docs/design/V3_CODE_REVIEW.md index 6f03ac87..13f5aaee 100644 --- a/docs/design/V3_CODE_REVIEW.md +++ b/docs/design/V3_CODE_REVIEW.md @@ -1,271 +1,362 @@ -# v3.0 Code Review Findings +# v3.0 Comprehensive Code Review -**Date:** January 2026 -**Reviewer:** Claude Opus 4.5 (AI-assisted review) -**Branch:** v3.0-dev - -This document summarizes the code review 
findings for the v3.0.0 release candidate. +**Date:** February 2026 +**Reviewer:** Claude Opus 4.6 (AI-assisted review) +**Branch:** v3.0-dev (119 commits since v2.2.6) +**Scope:** All changes between tags v2.2.6 and HEAD ## Executive Summary -The codebase is in good shape for a v3.0 release. The Sans-I/O architecture is well-implemented with clear separation of concerns. There are some areas of technical debt (duplicated code, test coverage gaps) that are noted for future work but are not release blockers. +The v3.0 release is a major architectural refactoring introducing Sans-I/O separation, full async support, and comprehensive API modernization -- all while maintaining backward compatibility with v2.x. The scope is large: 159 files changed, ~25,900 lines added, ~4,500 removed. + +**Overall assessment:** The architecture is well-designed and the codebase is in good shape for an alpha release. The Sans-I/O protocol layer is clean and testable. However, there are several bugs that should be fixed before a stable release, significant code duplication between sync/async paths (~650 lines), and some test coverage gaps. + +**Key findings:** +- 3 bugs that will cause runtime errors +- 1 security concern (UUID1 leaks MAC address in calendar UIDs) +- ~650 lines of sync/async duplication across domain objects +- Test coverage gaps in discovery module and sync client unit tests +- `breakpoint()` left in production code --- -## Duplicated Code +## 1. Architecture Changes -### Addressed Duplications (January 2026) +### 1.1 Sans-I/O Protocol Layer (`caldav/protocol/`) -The following duplications have been consolidated: +The protocol layer separates XML construction/parsing from I/O. This is the strongest part of the refactoring. 
-| Code Section | Status | Solution | -|--------------|--------|----------| -| `_get_calendar_home_set()` | ✅ Fixed | Extracted to `_extract_calendar_home_set_from_results()` in principal_ops.py | -| `get_calendars()` result processing | ✅ Fixed | Extracted to `_extract_calendars_from_propfind_results()` in calendarset_ops.py | -| Property lists for PROPFIND | ✅ Fixed | Moved to `BaseDAVClient.CALENDAR_LIST_PROPS` and `CALENDAR_HOME_SET_PROPS` | +| File | Lines | Rating | Purpose | +|------|-------|--------|---------| +| `types.py` | 243 | 9/10 | Frozen dataclasses: DAVRequest, DAVResponse, PropfindResult, CalendarQueryResult | +| `xml_builders.py` | 428 | 7/10 | Pure functions building XML for PROPFIND, calendar-query, MKCALENDAR, etc. | +| `xml_parsers.py` | 455 | 5/10 | Parse XML responses into typed results -- has a runtime bug | +| `__init__.py` | 46 | 8/10 | Clean re-exports | -### Remaining Duplications +**Issues found:** -| Code Section | Location (Sync) | Location (Async) | Duplication % | Lines | -|--------------|-----------------|------------------|---------------|-------| -| `propfind()` response parsing | davclient.py:280-320 | async_davclient.py:750-790 | 90% | ~40 | -| Auth type extraction | davclient.py:180-210 | async_davclient.py:420-450 | 100% | ~30 | +1. **BUG: NameError in `xml_parsers.py:260`** -- `parse_calendar_multiget_response()` calls `parse_calendar_query_response()` (no leading underscore), but only `_parse_calendar_query_response()` is defined. This will crash at runtime when calendar-multiget responses are parsed. -**Remaining estimated duplicated lines:** ~70 lines (down from ~240) +2. **Dead code in `xml_builders.py`** -- `_to_utc_date_string()` (line 401), `_build_freebusy_query_body()` (line 189), and `_build_mkcol_body()` (line 200) have zero callers. -### Future Refactoring Opportunities +3. 
**`CalendarInfo` name collision** -- `protocol/types.py:149` and `operations/calendarset_ops.py:24` define different dataclasses named `CalendarInfo` with different fields. Both are exported from their respective `__init__.py`. -The remaining duplication is in areas that are harder to consolidate due to sync/async differences: -1. HTTP response handling (different response object types) -2. Auth negotiation (requires I/O) +4. **Heavy duplication with `response.py`** -- Multistatus stripping, status validation, response element parsing, and the Confluence `%2540` workaround are duplicated nearly verbatim between `xml_parsers.py` and `response.py`. -These could potentially be addressed with a more sophisticated abstraction, but the current level is acceptable. +### 1.2 Operations Layer (`caldav/operations/`) ---- +Pure functions for CalDAV business logic. Well-structured but has some issues. + +| File | Lines | Rating | Purpose | +|------|-------|--------|---------| +| `base.py` | 189 | 8/10 | QuerySpec dataclass, URL helpers | +| `davobject_ops.py` | 293 | 7/10 | DAV property CRUD operations | +| `calendarobject_ops.py` | 531 | 6/10 | Calendar object lifecycle | +| `calendar_ops.py` | 261 | 7/10 | Search and sync-token operations | +| `calendarset_ops.py` | 245 | 7/10 | Calendar collection management | +| `principal_ops.py` | 162 | 7/10 | Principal discovery | +| `search_ops.py` | 445 | 6/10 | Advanced search query building | + +**Issues found:** -## Dead Code +1. **SECURITY: UUID1 leaks MAC address (`calendarobject_ops.py:55`)** -- `uuid.uuid1()` embeds the host MAC address into generated calendar UIDs. Since calendar events are routinely shared, this is a privacy leak. Should use `uuid.uuid4()`. -### Functions That Should Be Removed +2. **`search_ops.py` mutates inputs (line 381-389)** -- `_build_search_xml_query` calls `setattr(searcher, flag, True)` on the passed-in searcher object. This violates the "pure functions" contract and causes side effects in callers. 
-| Function | Location | Reason | -|----------|----------|--------| -| `auto_calendars()` | davclient.py:1037-1048 | Raises `NotImplementedError` | -| `auto_calendar()` | davclient.py:1051-1055 | Raises `NotImplementedError` | +3. **MD5 in FIPS environments (`calendar_ops.py:132`)** -- `hashlib.md5()` without `usedforsecurity=False` will fail in FIPS-mode environments. Used for fake sync tokens, not for security. -### Unused Imports +4. **Duplicate URL quoting** -- `quote(uid.replace("/", "%2F"))` pattern appears at both lines 62 and 138 in `calendarobject_ops.py`. -| Import | Location | Status | -|--------|----------|--------| -| `CONNKEYS` | davclient.py:87 | Imported but never used | +### 1.3 Data State Management (`caldav/datastate.py`) -### Recommendation +**Rating: 9/10** -- Excellent implementation of the State pattern for managing calendar data representations (raw string, icalendar, vobject). Smart optimizations for lazy switching between formats. Addresses issue #613. -Remove these in a cleanup commit before or after the v3.0 release. Low priority as they don't affect functionality. +### 1.4 Response Handling (`caldav/response.py`) + +**Rating: 6/10** -- `BaseDAVResponse` provides shared XML parsing for sync/async clients, but has significant duplication with the protocol layer and thread-unsafe mutable state. + +**Issues found:** + +1. **Unguarded index access (line 169)** -- `_strip_to_multistatus` accesses `tree[0]` without checking `len(tree) > 0` when `tree.tag == "xml"`. The equivalent code in `xml_parsers.py:279` does guard this. + +2. **Thread-unsafe** -- `self.objects`, `self.results`, `self._responses` are mutable instance state set during parsing. If a response object is shared between threads, results could be corrupt. --- -## Test Coverage Assessment +## 2. 
Async Support and Client Architecture -### Coverage by Module +### 2.1 Base Client (`caldav/base_client.py`) -| Module | Coverage | Rating | Notes | -|--------|----------|--------|-------| -| `caldav/protocol/` | Excellent | 9/10 | Pure unit tests, no mocking needed | -| `caldav/operations/` | Excellent | 9/10 | Well-tested request building | -| `caldav/async_davclient.py` | Good | 8/10 | Dedicated unit tests exist | -| `caldav/davclient.py` | Poor | 4/10 | Only integration tests | -| `caldav/collection.py` | Moderate | 6/10 | Integration tests cover most paths | -| `caldav/search.py` | Good | 7/10 | Complex search logic tested | -| `caldav/discovery.py` | None | 0/10 | No dedicated tests | +**Rating: 7/10** -- Good ABC extracting shared auth logic, URL handling, and factory functions. -### Coverage Gaps +**Issues:** +- `CalendarResult.__getattr__` (line 255) hides `None` calendars behind `AttributeError` -- confusing error message +- Missing `__aenter__`/`__aexit__` on `CalendarCollection`/`CalendarResult` -- async `get_calendars()` returns a plain list with no cleanup mechanism +- `get_davclient` and `get_calendars` factory functions are duplicated in `async_davclient.py` -#### 1. Error Handling (Rating: 2/10) +### 2.2 Async Client (`caldav/async_davclient.py`) -Missing tests for: -- Network timeout scenarios -- Malformed XML responses -- Authentication failures mid-session -- Server returning unexpected status codes -- Partial/truncated responses +**Rating: 6/10** -- Functional but has bugs, duplication, and architectural concerns. -**Example missing test:** -```python -def test_propfind_malformed_xml(): - """Should handle malformed XML gracefully.""" - client = DAVClient(...) - # Mock response with invalid XML - with pytest.raises(DAVError): - client.propfind(url, body) -``` +**Issues found:** -#### 2. Edge Cases (Rating: 3/10) +1. 
**BUG: `HTTPBearerAuth` incompatible with httpx (line 862)** -- `HTTPBearerAuth` is a requests/niquests `AuthBase` subclass that implements `__call__(self, r)`. httpx uses a different auth protocol (`httpx.Auth` with `auth_flow`). Bearer auth will fail at runtime on the httpx code path. -Missing tests for: -- Empty calendar responses -- Calendars with thousands of events -- Unicode in calendar names/descriptions -- Very long URLs -- Concurrent modifications +2. **BUG: Missing `url.unauth()` call** -- The sync client strips credentials from URLs at `davclient.py:342`, but the async client never does. Credentials embedded in URLs could leak in log messages. -#### 3. Sync DAVClient Unit Tests +3. **`_auto_url` blocks the event loop** -- RFC6764 discovery performs synchronous DNS lookups and HTTP requests inside `AsyncDAVClient.__init__()`, which is called from `async get_davclient()`. This blocks the event loop. -The sync `DAVClient` lacks dedicated unit tests. All testing happens through integration tests in `tests/test_caldav.py`. This makes it harder to: -- Test error conditions -- Verify specific code paths -- Run tests without a server +4. **Sync import dependency** -- `async_davclient.py:198` imports from `caldav.davclient`, pulling the sync HTTP stack into async contexts. `_auto_url` should live in `caldav/config.py` or `caldav/discovery.py`. -**Recommendation:** Add `tests/test_davclient.py` mirroring `tests/test_async_davclient.py` +5. **Password encoding asymmetry** -- Sync client encodes password to bytes eagerly (`davclient.py:329`), async does not. This creates different code paths for auth building. -#### 4. Discovery Module +6. **Response parsing boilerplate** -- The pattern `if response.status in (200, 207) and response._raw: ...` is repeated in ~5 methods. Should be a helper. -`caldav/discovery.py` has zero test coverage. 
This module handles: -- RFC 6764 DNS-based service discovery -- Well-known URI probing -- Domain validation +### 2.3 Sync Client (`caldav/davclient.py`) -**Risk:** DNS discovery bugs could cause security issues or connection failures. +**Rating: 6/10** -- Mature but has accumulated technical debt. ---- +**Issues found:** + +1. **Bare `except:` clauses (lines 367, 679)** -- Catches `SystemExit` and `KeyboardInterrupt`. Should be `except Exception:`. + +2. **`NotImplementedError` for auth failures (line 996)** -- When no supported auth scheme is found, `NotImplementedError` is raised. Should be `AuthorizationError`. + +3. **Type annotation gaps** -- Multiple `headers: Mapping[str, str] = None` parameters where `None` is not in the union type. -## Architecture Assessment +4. **`propfind` API divergence** -- Sync version (line 754) takes `props=None` which can be either XML string or property list. Async version (line 512) has separate `body` and `props` parameters. -### Strengths +### 2.4 Lazy Imports (`caldav/__init__.py`) -1. **Clean Sans-I/O Protocol Layer** - - XML building/parsing is pure and testable - - Same code serves sync and async - - Well-documented with type hints +**Rating: 8/10** -- Clean PEP 562 implementation. `import caldav` is now fast. -2. **Dual-Mode Domain Objects** - - `Calendar`, `Principal`, `Event` work with both client types - - Automatic detection of sync vs async context +Minor: `_LAZY_SUBMODULES` could be `frozenset`. No `DAVResponse` export (probably intentional). -3. **Good Separation of Concerns** - - Protocol layer: XML handling - - Operations layer: Request building - - Client layer: HTTP execution - - Domain layer: User-facing API +### 2.5 Async Entry Point (`caldav/aio.py`) -### Weaknesses +**Rating: 7/10** -- Clean re-export module with backward-compat `Async*` aliases. -1. 
**Client Code Duplication** - - Significant overlap between sync and async clients - - Changes must be made in two places +Issue: No `get_calendars`/`get_calendar` re-export -- users must import from `async_davclient` directly. -2. **Mixed Responsibilities in collection.py** - - 2000+ lines mixing domain logic with HTTP calls - - Could benefit from further extraction to operations layer +### 2.6 Auth Utilities (`caldav/lib/auth.py`) -3. **Inconsistent Error Handling** - - Some methods return `None` on error - - Others raise exceptions - - Logging levels inconsistent +**Rating: 8/10** -- Clean, pure functions with good type annotations. + +Minor: `WWW-Authenticate` parsing (line 31) splits on commas, which fails for headers with commas inside quoted strings (e.g., Bearer challenges with `error_description`). --- -## API Consistency +## 3. Domain Object Changes + +### 3.1 DAVObject (`caldav/davobject.py`) + +**Rating: 7/10** -- Solid dual-mode foundation. + +**Issues:** +1. **Production-unsafe assert (`line 99`)** -- `assert " " not in str(self.url)` will be silently removed with `-O`. Also passes when `url=None`. +2. **Return type lies for async** -- Methods like `get_property()`, `get_properties()`, `delete()` return coroutines when used with async clients, but annotations say `str | None`, `dict`, `None`. +3. **`set_properties` regression** -- Changed from per-property status checking to HTTP status-only checking, losing ability to detect partial PROPPATCH failures. + +### 3.2 Collection (`caldav/collection.py`) -### Legacy vs Recommended Methods +**Rating: 6/10** -- Functional but at 2,054 lines is the largest file and could benefit from extraction. -See [API_NAMING_CONVENTIONS.md](API_NAMING_CONVENTIONS.md) for the full naming convention guide. +**Issues:** +1. **Missing deprecation warnings** -- `calendars()`, `events()`, `todos()` etc. have docstring notes but no `warnings.warn()` calls (unlike `date_search` and `davobject.name` which do emit). +2. 
**`_generate_fake_sync_token` uses MD5 (line 1655)** -- Same FIPS concern as calendar_ops.py. +3. **`Principal._async_get_property` overrides parent (line 352)** with incompatible implementation. -| Legacy Method | Recommended Method | Notes | -|---------------|-------------------|-------| -| `date_search()` | `search()` | Deprecated with warning | -| `event.instance` | `event.icalendar_component` | Deprecated in v2.0 | -| `client.auto_conn()` | `get_davclient()` | Renamed | +### 3.3 CalendarObjectResource (`caldav/calendarobjectresource.py`) -### Capability Check Aliases +**Rating: 7/10** -- Good DataState integration but large (1,919 lines). -Added for API consistency (v3.0): -- `client.supports_dav()` → alias for `client.check_dav_support()` -- `client.supports_caldav()` → alias for `client.check_caldav_support()` -- `client.supports_scheduling()` → alias for `client.check_scheduling_support()` +**Issues:** +1. **BUG: `_set_deprecated_vobject_instance` (line 1248)** -- Calls `self._get_vobject_instance(inst)` but `_get_vobject_instance` takes no arguments. Will raise `TypeError` when invoked via `event.instance = some_vobject`. +2. **`id` setter is a no-op (line 123)** -- `id` passed to constructor is silently ignored. +3. **`_async_load` missing multiget fallback** -- Sync `load()` has `load_by_multiget()` fallback, async does not. +4. **Dual data model risk** -- Old `_data`/`_vobject_instance`/`_icalendar_instance` coexist with new `_state`. Manual sync at lines 1206, 1279 could desynchronize. + +### 3.4 Search (`caldav/search.py`) + +**Rating: 7/10** -- Excellent generator-based Sans-I/O pattern. + +**Issues:** +1. **Generator error handling (lines 516-545)** -- `except StopIteration: return []` silently swallows premature generator exits, masking bugs. +2. **Double-loading (lines 448-467)** -- Objects loaded twice as "partial workaround for #201" with `except Exception: pass` masking errors. +3. 
**`TypesFactory` shadowing (line 25)** -- Class shadowed by instance at module level. --- -## GitHub Issues Review +## 4. Configuration and Compatibility + +### 4.1 Config System (`caldav/config.py`) + +**Rating: 7/10** -- Good centralized configuration with clear priority chain. -### Issue #71: calendar.add_event can update as well +**Issues:** +1. **`config_section` name shadowing (line 329)** -- Parameter shadows module-level function. +2. **`_extract_conn_params_from_section` rejects URL-less configs (line 395)** -- Returns `None` for sections without URL, conflicting with feature-based auto-connect. +3. **`read_config` inconsistency** -- Returns `None` when searching defaults (line 91) but `{}` when explicit file not found (line 116). -**Status:** Open (since v0.7 milestone) -**Summary:** Suggests renaming `add_` to `save_` +### 4.2 Compatibility Hints (`caldav/compatibility_hints.py`) -**Analysis:** -- Current API has both `add_event()` and `save_event()` -- `add_event()` is a convenience wrapper that creates and saves -- `save_event()` saves an existing or new event -- The naming reflects intent: "add" = create new, "save" = persist changes +**Rating: 7/10** -- Comprehensive server database. -**Recommendation:** Document the distinction clearly. Not a v3.0 blocker. +**Issues:** +1. **`breakpoint()` in production code (line 443)** -- `FeatureSet._default()` has `breakpoint()` in the `else` branch. Will pause execution in production. +2. **Deprecated `incompatibility_description` dict still present (lines 660-778)** -- Marked "TO BE REMOVED" with 30+ entries. +3. **`# fmt: off` for entire 1,366-line file** -- Should scope it to just the dict definitions. -### Issue #613: Implicit data conversions +--- + +## 5. 
Code Duplication Analysis + +### 5.1 Sync/Async Client Duplication (~70 lines) -**Status:** Open -**Summary:** Accessing `.data`, `.icalendar_instance`, `.vobject_instance` can cause implicit conversions with side effects +| Code Section | davclient.py | async_davclient.py | Similarity | +|---|---|---|---| +| `search_principals` | 376-435 | 1107-1168 | ~95% (copy-paste + await) | +| `_get_calendar_home_set` | 548-568 | 974-994 | ~95% | +| `get_events` | 570-597 | 996-1023 | ~95% | +| `get_todos` | 599-613 | 1025-1039 | ~95% | +| `propfind` response parsing | 280-320 | 750-790 | ~90% | +| Auth type extraction | 180-210 | 420-450 | ~100% | +| Factory functions | 1015-1078 | 1312-1431 | ~80% | -**Analysis:** -```python -# This sequence looks like a no-op but converts data multiple times: -my_event.data -my_event.icalendar_instance -my_event.vobject_instance -my_event.data # Data may have changed! -``` +### 5.2 Domain Object Async/Sync Duplication (~580 lines) -**Risks:** -- Data representation changes -- CPU waste on conversions -- Potential data loss if reference held across conversion +| File | Duplicated pairs | Approx. lines | +|---|---|---| +| davobject.py | 6 method pairs | ~180 | +| collection.py | 8 method pairs | ~250 | +| calendarobjectresource.py | 4 method pairs | ~100 | +| search.py | 2 method pairs | ~50 | -**Recommendation:** This is a significant API design issue but changing it in v3.0 would be disruptive. Consider for v4.0 with a migration path. +### 5.3 Protocol/Response Duplication + +`response.py` and `protocol/xml_parsers.py` share five pieces of nearly identical logic (multistatus stripping, status validation, response element parsing, `%2540` workaround, propstat loops). --- -## Recommendations +## 6. 
Test Coverage Assessment -### For v3.0 Release +| Module | Coverage | Rating | Notes | +|---|---|---|---| +| `caldav/protocol/` | Excellent | 9/10 | Pure unit tests in test_protocol.py | +| `caldav/operations/` | Excellent | 9/10 | 6 dedicated test files | +| `caldav/async_davclient.py` | Good | 8/10 | test_async_davclient.py (821 lines) | +| `caldav/datastate.py` | Good | 7/10 | Covered through calendarobject tests | +| `caldav/search.py` | Good | 7/10 | test_search.py + integration tests | +| `caldav/davclient.py` | Poor | 4/10 | Only integration tests, no unit tests | +| `caldav/collection.py` | Moderate | 6/10 | Integration tests cover most paths | +| `caldav/discovery.py` | None | 0/10 | Zero dedicated tests | +| `caldav/config.py` | Poor | 3/10 | Module docstring says "test coverage is poor" | -1. ✅ **Release as-is** - The codebase is stable and functional -2. 📝 **Update CHANGELOG** - Add missing entries for API aliases and issue #128 fix -3. 🧹 **Optional cleanup** - Remove dead code (`auto_calendars`, `auto_calendar`) +### Notable Test Gaps -### For v3.1 or Later +- **Error handling scenarios** -- No tests for malformed XML, network timeouts, partial responses +- **Sync DAVClient unit tests** -- No `test_davclient.py` mirroring `test_async_davclient.py` +- **Discovery module** -- DNS-based discovery has zero test coverage despite security implications +- **Deprecation warnings** -- No tests verify that deprecated methods emit warnings -1. **Reduce duplication** - Extract shared client logic to operations layer -2. **Add sync client unit tests** - Mirror async test structure -3. **Test discovery module** - Add tests for DNS-based discovery -4. **Error handling tests** - Add comprehensive error scenario tests -5. **Address issue #613** - Design solution for implicit conversions +--- -### For v4.0 +## 7. Bugs Summary (Ordered by Severity) -1. **Consider issue #71** - Evaluate `add_*` vs `save_*` naming -2. 
**Fix implicit conversions** - Redesign data access to avoid side effects -3. **Further refactoring** - Consider splitting collection.py +| # | Severity | Location | Description | +|---|---|---|---| +| 1 | HIGH | `xml_parsers.py:260` | NameError: calls `parse_calendar_query_response` (missing underscore) | +| 2 | HIGH | `async_davclient.py:862` | `HTTPBearerAuth` incompatible with httpx -- bearer auth broken on httpx path | +| 3 | MEDIUM | `calendarobjectresource.py:1248` | `_set_deprecated_vobject_instance` passes arg to no-arg getter | +| 4 | MEDIUM | `compatibility_hints.py:443` | `breakpoint()` in production code path | +| 5 | MEDIUM | `async_davclient.py` | Missing `url.unauth()` -- credential leak in logs | +| 6 | MEDIUM | `calendarobject_ops.py:55` | UUID1 leaks MAC address in calendar UIDs | +| 7 | LOW | `davclient.py:367,679` | Bare `except:` catches SystemExit/KeyboardInterrupt | +| 8 | LOW | `response.py:169` | Unguarded `tree[0]` access | +| 9 | LOW | `davobject.py:99` | Production-unsafe `assert` for URL validation | --- -## Appendix: Test Files +## 8. Recommendations + +### For v3.0 Stable Release (Must Fix) + +1. Fix the NameError in `xml_parsers.py:260` (add underscore) +2. Remove `breakpoint()` from `compatibility_hints.py:443` +3. Fix `_set_deprecated_vobject_instance` to call setter not getter +4. Replace `uuid.uuid1()` with `uuid.uuid4()` in `calendarobject_ops.py:55` +5. Fix bare `except:` to `except Exception:` in `davclient.py` +6. Add `url.unauth()` call to `AsyncDAVClient.__init__` -| Test File | Tests | Purpose | -|-----------|-------|---------| -| `tests/test_protocol.py` | 15+ | Protocol layer unit tests | -| `tests/test_operations_*.py` | 30+ | Operations layer unit tests | -| `tests/test_async_davclient.py` | 20+ | Async client unit tests | -| `tests/test_caldav.py` | 100+ | Integration tests | -| `tests/test_caldav_unit.py` | 10+ | Misc unit tests | +### For v3.0 Stable Release (Should Fix) -### Running Tests +7. 
Fix `HTTPBearerAuth` for httpx path (or document httpx+bearer as unsupported) +8. Add guard for `tree[0]` access in `response.py:169` +9. Replace production `assert` with proper validation in `davobject.py:99` +10. Add `usedforsecurity=False` to MD5 calls for FIPS compliance -```bash -# Quick unit tests (no server needed) -pytest tests/test_protocol.py tests/test_operations*.py -v +### For v3.1+ -# Full test suite with embedded servers -pytest -k "Radicale or Xandikos" +11. Reduce sync/async client duplication (move `search_principals`, `get_events`, `get_todos` to operations layer) +12. Consolidate `response.py` and `protocol/xml_parsers.py` duplication +13. Add sync DAVClient unit tests mirroring async test structure +14. Add discovery module tests +15. Add missing `warnings.warn()` to all deprecated methods +16. Remove dead code in `xml_builders.py` +17. Move `_auto_url` from `davclient.py` to shared module +18. Make `search_ops._build_search_xml_query` not mutate its input + +### For v4.0 + +19. Address implicit data conversion side effects (issue #613) +20. Consider splitting `collection.py` (2,054 lines) and `calendarobjectresource.py` (1,919 lines) +21. Fix return type annotations for async-capable methods (use `@overload`) +22. 
Remove `incompatibility_description` dict from compatibility_hints.py + +--- -# Style checks -tox -e style -``` +## Appendix A: New Files Added + +| File | Lines | Purpose | +|------|-------|---------| +| `caldav/async_davclient.py` | 1,431 | Async HTTP client | +| `caldav/base_client.py` | 480 | Shared client ABC | +| `caldav/response.py` | 390 | Shared response parsing | +| `caldav/datastate.py` | 246 | Data representation state machine | +| `caldav/aio.py` | 93 | Async entry point | +| `caldav/lib/auth.py` | 69 | Shared auth utilities | +| `caldav/protocol/types.py` | 243 | Request/response dataclasses | +| `caldav/protocol/xml_builders.py` | 428 | XML construction | +| `caldav/protocol/xml_parsers.py` | 455 | XML parsing | +| `caldav/operations/base.py` | 189 | Query specifications | +| `caldav/operations/search_ops.py` | 445 | Search query building | +| `caldav/operations/calendarobject_ops.py` | 531 | Calendar object ops | +| `caldav/operations/davobject_ops.py` | 293 | DAV object ops | +| `caldav/operations/calendar_ops.py` | 261 | Calendar search/sync ops | +| `caldav/operations/calendarset_ops.py` | 245 | Calendar set ops | +| `caldav/operations/principal_ops.py` | 162 | Principal ops | + +## Appendix B: File Size Concerns + +| File | Lines | Recommendation | +|------|-------|---------------| +| `collection.py` | 2,054 | Extract SynchronizableCalendarObjectCollection, ScheduleMailbox | +| `calendarobjectresource.py` | 1,919 | Extract Todo.complete() and recurring task logic | +| `async_davclient.py` | 1,431 | Reduce by moving shared code to operations layer | +| `compatibility_hints.py` | 1,366 | Consider YAML/JSON for server profiles | +| `davclient.py` | 1,089 | Reduce by moving shared code to operations layer | + +## Appendix C: Test Files Added + +| File | Lines | Purpose | +|------|-------|---------| +| `test_async_davclient.py` | 821 | Async client unit tests | +| `test_async_integration.py` | 466 | Async integration tests | +| 
`test_operations_calendarobject.py` | 529 | CalendarObject ops tests | +| `test_protocol.py` | 319 | Protocol layer tests | +| `test_operations_calendarset.py` | 277 | CalendarSet ops tests | +| `test_operations_davobject.py` | 288 | DAVObject ops tests | +| `test_operations_principal.py` | 242 | Principal ops tests | +| `test_operations_calendar.py` | 329 | Calendar ops tests | +| `test_operations_base.py` | 192 | Base ops tests | +| `test_lazy_import.py` | 141 | Lazy import verification | diff --git a/docs/design/niquests-code-review.md b/docs/design/niquests-code-review.md new file mode 100644 index 00000000..772d1030 --- /dev/null +++ b/docs/design/niquests-code-review.md @@ -0,0 +1,107 @@ +# Code Review: Niquests + +*Review date: 2026-01-31* + +## Overview + +Niquests is a modern HTTP client library for Python that serves as a drop-in replacement for the Requests library. It maintains API compatibility with Requests while adding advanced features like HTTP/2, HTTP/3 over QUIC, async/await support, and enterprise-grade security features. + +**Key Stats:** +- 41 Python modules in main source +- 23 test files +- Version: 3.17+ (Production Ready) +- Python Support: 3.7+ (including Python 3.14 and PyPy) + +--- + +## Strengths + +### 1. Clean Architecture + +Well-separated sync/async stacks with parallel class hierarchies. The adapter pattern allows for flexible transport customization. + +``` +Public API Layer (api.py / async_api.py) + │ + ▼ +Session Classes (Session / AsyncSession) + │ + ▼ +Adapter Classes (HTTPAdapter / AsyncHTTPAdapter) + │ + ▼ +Request/Response Classes (Request, PreparedRequest, Response, AsyncResponse) +``` + +### 2. Type Safety + +Extensive use of type hints with `TypeAlias` and `@overload` decorators. Strict mypy configuration enforces correctness. + +### 3. Backward Compatibility + +Maintains full Requests API compatibility while adding modern features - a smart migration path for users. + +### 4. 
Comprehensive Test Suite + +23 test files covering unit tests, integration tests, and live network tests. Good coverage of edge cases. + +### 5. Modern Python Practices + +Uses `from __future__ import annotations`, lazy imports, contextvars for thread/task safety. + +--- + +## Areas for Improvement + +### 1. `_compat.py` - Consider consolidating compatibility logic + +The urllib3/urllib3_future detection is complex. Consider documenting the decision tree more explicitly. + +### 2. `models.py` - Large file (~1600+ lines) + +Contains `Request`, `PreparedRequest`, `Response`, and `AsyncResponse`. Could potentially be split into `request.py` and `response.py` for maintainability. + +### 3. Duplicate code patterns in sync/async + +`HTTPAdapter` and `AsyncHTTPAdapter` share significant logic. Consider a mixin or base class to reduce duplication. Same applies to `Session` vs `AsyncSession`. + +### 4. Hook system complexity + +`hooks.py` handles both sync and async dispatch with runtime detection. The `iscoroutinefunction` checks add overhead. Consider documenting performance implications. + +### 5. Exception hierarchy + +`exceptions.py` has many exception types. Some inherit from multiple bases (e.g., `SSLError` from both `ConnectionError` and `IOError`). The hierarchy could be documented better. + +--- + +## Minor Issues + +| File | Issue | +|------|-------| +| `sessions.py:474` | `hasattr(app, "__call__")` - all callables have `__call__`, consider `callable(app)` | +| `models.py` | Multiple `# type: ignore` comments - could benefit from more specific ignores | +| `utils.py` | Large utility file - consider splitting by domain (url utils, auth utils, etc.) 
| + +--- + +## Security Considerations + +- **Good**: OS truststore by default (no outdated certifi bundles) +- **Good**: OCSP/CRL support for certificate revocation +- **Good**: No eval/exec usage found +- **Note**: `trust_env=True` reads `.netrc` - documented but worth highlighting in security docs + +--- + +## Recommendations + +1. **Documentation**: Add architecture diagrams to docs showing the sync/async class relationships +2. **Deprecation tracking**: Consider a `DEPRECATIONS.md` file tracking Python version-specific changes +3. **Performance benchmarks**: Add benchmarks comparing sync/async and HTTP/1.1 vs HTTP/2 vs HTTP/3 + +--- + +## Conclusion + +Overall, this is a well-maintained, production-ready codebase with thoughtful design choices. The Python 3.14+ compatibility work demonstrates active maintenance. diff --git a/docs/design/urllib3-future-code-review.md b/docs/design/urllib3-future-code-review.md new file mode 100644 index 00000000..cc76378d --- /dev/null +++ b/docs/design/urllib3-future-code-review.md @@ -0,0 +1,227 @@ +# Code Review: urllib3.future + +*Review date: 2026-01-31* + +## Overview + +urllib3.future is an advanced fork of urllib3 - a powerful, user-friendly HTTP client for Python with native support for HTTP/1.1, HTTP/2, and HTTP/3 protocols. + +**Key Statistics:** +- ~35,756 LOC in src/urllib3 +- 70 test files (59 actual test modules) +- Python Support: 3.7+ (including CPython and PyPy) +- Dependencies: h11 (HTTP/1.1), jh2 (HTTP/2), qh3 (HTTP/3/QUIC) + +--- + +## Strengths + +### 1. Comprehensive Protocol Support + +- Clean abstraction over HTTP/1.1 (h11), HTTP/2 (jh2), and HTTP/3 (qh3) +- Transparent ALPN negotiation - users don't need to explicitly configure protocols +- Alt-Svc header handling for automatic HTTP/3 upgrades + +### 2. 
Sophisticated Connection Pooling + +- `TrafficPolice` custom queue handles HTTP/2+ multiplexing correctly +- `ResponsePromise` for lazy/deferred responses enables true concurrency +- Background keep-alive management prevents stale connections + +### 3. Advanced DNS Resolution + +- Pluggable resolver architecture (DOH, DOQ, DOT, DOU, system, manual) +- Async variants for all resolvers +- DNSSEC support + +### 4. Backward Compatibility + +- Drop-in urllib3 replacement (version 2.x.9PP scheme) +- Maintains API compatibility while extending functionality + +### 5. Good Test Infrastructure + +- Traefik-based integration tests with real HTTP/2 and HTTP/3 +- Downstream compatibility tests (requests, niquests, boto3) +- 70 test files covering protocols, concurrency, and edge cases + +--- + +## Areas of Concern + +### 1. TrafficPolice Complexity (`util/traffic_police.py` - 1,014 LOC) + +This is the most critical and complex component: + +```python +# Complex state management: +class TrafficState(IntEnum): + IDLE = 0 # Connection available + USED = 1 # Active streams (HTTP/2) + SATURATED = 2 # At max streams +``` + +**Issues:** +- Very difficult to reason about correctness +- Custom synchronization primitives (`PendingSignal`, `ActiveCursor`) +- No formal state machine documentation +- Risk of subtle race conditions + +**Recommendation:** Add state machine diagrams and consider formal verification or extensive fuzz testing. + +### 2. Large Monolithic Classes + +| Class | LOC | Concern | +|-------|-----|---------| +| `HTTPConnectionPool` | 2,402 | Connection lifecycle, pooling, retries all in one | +| `HfaceBackend` | 1,838 | Protocol negotiation, ALPN, upgrade logic | +| `PoolManager` | 1,158 | Pool caching, routing, proxy handling | +| `AsyncHTTPResponse` | 20,229 | Extremely large - needs investigation | + +**Recommendation:** Consider extracting focused classes (e.g., `ConnectionLifecycle`, `ProtocolNegotiator`, `PoolCache`). + +### 3. 
Technical Debt (16 items found) + +```python +# Examples from codebase: +"TODO(t-8ch): Stop inheriting from AssertionError in v2.0" # ProxySchemeUnknown +"TODO: Remove this when we break backwards compatibility" # URL handling +"FIXME: Can we do this without accessing private httplib _method?" +"FIXME: Is there a better way to differentiate between SSLErrors?" +``` + +**Recommendation:** Create a v3.0 roadmap to address these without breaking current compatibility. + +### 4. Sync/Async Code Duplication + +The async implementation is a near-complete mirror: + +``` +connection.py (1,130 LOC) → _async/connection.py (1,096 LOC) +connectionpool.py (2,402 LOC) → _async/connectionpool.py (2,440 LOC) +poolmanager.py (1,158 LOC) → _async/poolmanager.py (1,005 LOC) +``` + +**Issues:** +- Maintenance burden - changes must be made twice +- Risk of drift between implementations +- ~7,000 LOC of near-duplicate code + +**Recommendation:** Consider: +- Shared base classes with sync/async method variants +- Code generation from a single source +- Template-based approach (like aiofiles uses) + +### 5. Circular Dependency Risk + +``` +util/ imports from backend +backend imports from util +contrib modules have interdependencies +``` + +Heavy use of `TYPE_CHECKING` blocks indicates this is already causing issues: + +```python +if typing.TYPE_CHECKING: + from .connection import HTTPConnection # Avoid circular import +``` + +--- + +## Security Considerations + +| Area | Status | Notes | +|------|--------|-------| +| TLS validation | Good | Proper certificate validation by default | +| ALPN negotiation | Good | Secure protocol selection | +| Fingerprint pinning | Good | Supported | +| Environment variables | Warning | `SSHKEYLOGFILE`, `QUICLOGDIR` should warn in production | +| Proxy auth headers | Good | `NOT_FORWARDABLE_HEADERS` filtering | +| DNS security | Good | DOH/DOQ/DOT options available | + +--- + +## Specific Code Issues + +1. 
**Exception inheritance oddity** (`exceptions.py`): + ```python + class ProxySchemeUnknown(AssertionError, ValueError): + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0 + ``` + +2. **Private API access** (`response.py`): + ```python + # FIXME: Can we do this without accessing private httplib _method? + ``` + +3. **`AsyncHTTPResponse` at 20,229 LOC** - This needs investigation. Either it's including generated code, or there's significant complexity that should be refactored. + +--- + +## Recommendations + +### Priority 1: Critical + +1. **Audit TrafficPolice** - Add comprehensive documentation and state machine diagrams +2. **Investigate AsyncHTTPResponse size** - 20K LOC for a response class is unusual +3. **Add race condition testing** - Fuzz testing for connection pool + +### Priority 2: Maintainability + +1. **Reduce code duplication** between sync/async +2. **Break up large classes** into focused components +3. **Address technical debt** - Plan v3.0 breaking changes + +### Priority 3: Documentation + +1. **Architecture overview** with diagrams +2. **Protocol negotiation flowchart** +3. 
**TrafficPolice state machine documentation** + +--- + +## Project Structure + +``` +src/urllib3/ +├── Core modules (sync): +│ ├── connection.py (1,130 LOC) +│ ├── connectionpool.py (2,402 LOC) +│ ├── poolmanager.py (1,158 LOC) +│ ├── response.py (1,086 LOC) +│ └── backend/ +│ ├── _base.py (682 LOC) - Abstract base classes +│ ├── hface.py (1,838 LOC) - Main protocol handler +│ └── _async/ - Async equivalents +├── _async/ - Complete async mirror +├── contrib/ - Extensions: +│ ├── hface/ - HTTP protocol implementations +│ │ └── protocols/ (http1, http2, http3) +│ ├── resolver/ - Advanced DNS resolution +│ │ ├── doh/ - DNS over HTTPS +│ │ ├── doq/ - DNS over QUIC +│ │ ├── dot/ - DNS over TLS +│ │ └── dou/ - DNS over UDP +│ ├── webextensions/ - WebSocket, SSE +│ ├── socks.py - SOCKS proxy support +│ └── pyopenssl.py - OpenSSL backend +├── util/ - Utilities +│ ├── traffic_police.py (1,014 LOC) +│ ├── ssl_.py (864 LOC) +│ ├── timeout.py +│ └── retry.py (558 LOC) +└── exceptions.py - 30+ exception types +``` + +--- + +## Conclusion + +urllib3.future is an ambitious and feature-rich HTTP client with excellent protocol support. The main concerns are: + +- **Complexity** in `TrafficPolice` and protocol negotiation +- **Maintainability** due to large classes and sync/async duplication +- **Technical debt** accumulated from maintaining backward compatibility + +The codebase is well-tested and shows good defensive programming practices, but would benefit from architectural documentation and potential refactoring of the largest components. diff --git a/docs/source/about.rst b/docs/source/about.rst index c506b78a..3fb6125f 100644 --- a/docs/source/about.rst +++ b/docs/source/about.rst @@ -269,7 +269,7 @@ tox should also work: $ tox -e py It will run some unit tests and some functional tests. 
You may want to add your own -private servers into tests/conf_private.py, see tests/conf_private.py.EXAMPLE +private servers into tests/caldav_test_servers.yaml, see tests/caldav_test_servers.yaml.example Niquests vs Requests vs HTTPX ============================= diff --git a/docs/source/conf.py b/docs/source/conf.py index b86ad723..c128feaa 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -13,6 +13,16 @@ sys.path.insert(0, "../..") # Adjust as needed +# Get version from package +try: + from caldav import __version__ + + version = __version__ + release = __version__ +except ImportError: + version = "unknown" + release = "unknown" + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -60,15 +70,11 @@ def linkcode_resolve(domain, info): # General information about the project. project = "caldav" -copyright = "2010-2024, Cyril Robert, Tobias Brox and other contributors" +copyright = "2010-2026, Cyril Robert, Tobias Brox and other contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -# version = '0' -# The full version, including alpha/beta/rc tags. +# built documents. These are set above from caldav.__version__. # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index fa1dc442..e6057367 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -2,130 +2,83 @@ Tutorial ======== -In this tutorial you should learn basic usage of the python CalDAV -client library. 
You are encouraged to copy the code examples into a -file and add a ``breakpoint()`` inside the with-block so you can -inspect the return objects you get from the library calls. Do not -name your file `caldav.py` or `calendar.py`, this may break some -imports. +This tutorial covers basic usage of the python CalDAV client library. +Copy code examples into a file and add a ``breakpoint()`` inside the +with-block to inspect return objects. Do not name your file `caldav.py` +or `calendar.py`, as this may break imports. -To follow this tutorial as intended, each code block should be run -towards a clean-slate Radicale server. To do this, you need: +Ad-hoc Configuration +-------------------- -* The source code of caldav with tests: ``git clone https://github.com/python-caldav/caldav.git ; cd caldav`` -* The Radicale python package: ``pip install radicale`` -* An environmental variable set: ``export PYTHON_CALDAV_USE_TEST_SERVER=1`` +To run the tutorial examples against a test server, you need: -With this setup, the with-blocks in the code sections below will spin -up a Radicale server. +* The caldav source with tests: ``git clone https://github.com/python-caldav/caldav.git ; cd caldav`` +* Radicale installed: ``pip install radicale`` +* Environment variable set: ``export PYTHON_CALDAV_USE_TEST_SERVER=1`` -When you've run the tutorial as intended, I recommend going through the examples again towards your own calendar server: +With this setup, the with-blocks below will spin up a Radicale server. -* Set the environment variables ``CALDAV_URL``, ``CALDAV_USER`` and ``CALDAV_PASSWORD`` to point to your personal calendar server. -* Be aware that different calendar servers may behave differently. For instance, not all of them allows you to create a calendar. Some are even read-only. -* You will need to revert all changes done. The code examples below do not do any cleanup. 
If your calendar server supports creating and deleting calendars, then it should be easy enough: ``my_new_calendar.delete()`` inside the with-block. Events also have a ``.delete()``-method. Beware that there is no ``undo``. You're advised to have a local backup of your calendars. I'll probably write a HOWTO on that one day. -* Usage of a context manager is considered best practice, but not really needed – you may skip the with-statement and write just ``client = get_davclient()``. This will make it easier to test code from the python shell. -Quick Start: Getting Calendars Directly ---------------------------------------- +Real Configuration +------------------ -As of 3.0, there are convenience functions to get calendars directly -without manually creating a client and principal: +The recommended way to configure caldav is through a config file or +environment variables. Create ``~/.config/caldav/caldav.conf``: -.. code-block:: python - - from caldav import get_calendars, get_calendar - - # Get all calendars - calendars = get_calendars( - url="https://caldav.example.com/", - username="alice", - password="secret" - ) - for cal in calendars: - print(f"Found calendar: {cal.name}") +.. code-block:: ini - # Get a specific calendar by name - work_calendar = get_calendar( - url="https://caldav.example.com/", - username="alice", - password="secret", - calendar_name="Work" - ) - - # Get calendars by URL or ID - calendars = get_calendars( - url="https://caldav.example.com/", - username="alice", - password="secret", - calendar_url="/calendars/alice/personal/" # or just "personal" - ) - -These functions also support reading configuration from environment -variables (``CALDAV_URL``, ``CALDAV_USERNAME``, ``CALDAV_PASSWORD``) -or config files, so you can simply call: + # ~/.config/caldav/caldav.conf + [default] + url = https://caldav.example.com/ + username = alice + password = secret -.. 
code-block:: python +Or set environment variables: - from caldav import get_calendars - calendars = get_calendars() # Uses env vars or config file +.. code-block:: bash -The Traditional Approach ------------------------- + # export CALDAV_URL=https://caldav.example.com/ + # export CALDAV_USERNAME=alice + # export CALDAV_PASSWORD=secret -As of 2.0, it's recommended to start initiating a -:class:`caldav.davclient.DAVClient` object using the ``get_davclient`` -function, go from there to get a -:class:`caldav.collection.Principal`-object, and from there find a -:class:`caldav.collection.Calendar`-object. This is how to do it: +With configuration in place, you can use caldav without hardcoding credentials: .. code-block:: python - from caldav import get_davclient - from caldav.lib.error import NotFoundError + from caldav import get_calendars - with get_davclient() as client: - my_principal = client.get_principal() - try: - my_calendar = my_principal.calendar() - print(f"A calendar was found at URL {my_calendar.url}") - except NotFoundError: - print("You don't seem to have any calendars") + with get_calendars() as calendars: + for cal in calendars: + print(cal.get_display_name()) -Caveat: Things will break if password/url/username is wrong, but -perhaps not where you expect it to. To test, you may try out -``get_davclient(username='alice', password='hunter2', url='https://calendar.example.com/dav/')``. +Getting Calendars +----------------- -The ``calendar``-method above gives one calendar – if you have more -calendars, it will give you the first one it can find – which may not -be the correct one. To filter there are parameters ``name`` and -``cal_id`` – I recommend testing them: +Use :func:`caldav.get_calendars` to get all calendars or filter by name: .. 
code-block:: python - from caldav import get_davclient - from caldav.lib.error import NotFoundError + from caldav import get_calendars, get_calendar, get_davclient + # First create a calendar to work with with get_davclient() as client: - my_principal = client.get_principal() - try: - my_calendar = my_principal.calendar(name="My Calendar") - except NotFoundError: - print("You don't seem to have a calendar named 'My Calendar'") - -If you happen to know the URL or path for the calendar, you don't need -to go through the principal object. - -.. code-block:: python + my_principal = client.principal() + my_principal.make_calendar(name="Work") - from caldav import get_davclient + # Get all calendars + with get_calendars() as calendars: + for cal in calendars: + print(cal.get_display_name()) - with get_davclient() as client: - my_calendar = client.calendar(url="/dav/calendars/mycalendar") + # Get a specific calendar by name + with get_calendar(calendar_name="Work") as work_calendar: + if work_calendar: + events = work_calendar.search(event=True) -Note that in the example above, no communication is done. If the URL is wrong, you will only know it when trying to save or get objects from the server! +Creating Calendars and Events +----------------------------- -For servers that support it, it may be useful to create a dedicated test calendar – that way you can test freely without risking to mess up your calendar events. Let's populate it with an event while we're at it: +Create a test calendar and add an event: .. code-block:: python @@ -142,7 +95,7 @@ For servers that support it, it may be useful to create a dedicated test calenda summary="Do the needful", rrule={'FREQ': 'YEARLY'}) -You have icalendar code and want to put it into the calendar? Easy! +Add an event from icalendar data: .. code-block:: python @@ -165,12 +118,16 @@ You have icalendar code and want to put it into the calendar? Easy! 
END:VCALENDAR """) -The best way of getting information out from the calendar is to use the search. Currently most of the logic is done on the server side – and the different calendar servers tend to give different results given the same data and search query. In future versions of the CalDAV library the intention is to do more workarounds and logic on the client side, allowing for more consistent results across different servers. +Searching +--------- + +Use search to find events, tasks, or journals: .. code-block:: python from caldav import get_davclient from datetime import date + import datetime with get_davclient() as client: my_principal = client.get_principal() @@ -191,20 +148,20 @@ The best way of getting information out from the calendar is to use the search. assert len(my_events) == 1 print(my_events[0].data) -``expand`` matters for recurring events and tasks, instead of getting returned the original event (with ``DTSTART`` set in 2023 and an ``RRULE`` set) it will return the *recurrence* for year 2026. Or, rather, a list of recurrences if there are more of them in the search interval. - -``event`` causes the search to only return events. There are three kinds of objects that can be saved to a calendar (but not all servers support all three) – events, journals and tasks (``VEVENT``, ``VJOURNAL`` and ``VTODO``). This is called Calendar Object Resources in the RFC. Now that's quite a mouthful! To ease things, the word "event" is simply used in documentation and communication. So when reading "event", be aware that it actually means "a CalendarObjectResource objects such as an event, but it could also be a task or a journal" – and if you contribute code, remember to use ``CalendarObjectResource`` rather than ``Event``. - -Without ``event=True`` explicitly set, all kinds of objects *should* be returned. Unfortunately many servers return nothing – so as of 2.0, it's important to always specify if you want events, tasks or journals. 
In future versions of CalDAV there will be workarounds for this so ``event=True`` can be safely skipped, regardless what server is used. +The ``expand`` parameter expands recurring events into individual +occurrences within the search interval. The ``event=True`` parameter +filters results to events only (excluding tasks and journals). -The return type is a list of objects of the type :class:`caldav.calendarobjectresource.Event` – for tasks and journals there are similar classes Todo and Journal. +Modifying Events +---------------- -The ``data`` property delivers the icalendar data as a string. It can be modified: +The ``data`` property contains icalendar data as a string: .. code-block:: python from caldav import get_davclient from datetime import date + import datetime with get_davclient() as client: my_principal = client.get_principal() @@ -217,6 +174,7 @@ The ``data`` property delivers the icalendar data as a string. It can be modifi rrule={'FREQ': 'YEARLY'}) my_events = my_new_calendar.search( + event=True, start=date(2026,5,1), end=date(2026,6,1), expand=True) @@ -225,25 +183,14 @@ The ``data`` property delivers the icalendar data as a string. It can be modifi my_events[0].data = my_events[0].data.replace("Do the needful", "Have fun!") my_events[0].save() -As seen above, we can use ``save()`` to send a modified object back to -the server. In the case above, we've edited a recurrence. Now that -we've saved the object, you're encouraged to test with search with and -without expand set and with different years, print out -``my_event[0].data`` and see what results you'll get. The -``save()``-method also takes a parameter ``all_recurrences=True`` if -you want to edit the full series! - -The code above is far from "best practice". You should not try to -parse or modify ``event.data`` directly. Use the icalendar library instead. - -Most events contain one *component* (always true when using ``expand=True``). 
-The ``event.component`` property gives easy access to the -:class:`icalendar.cal.Event`-object. To edit, use ``edit_icalendar_instance()``: +Better practice is to use the icalendar library. The ``component`` +property gives access to the :class:`icalendar.cal.Event` object: .. code-block:: python from caldav import get_davclient from datetime import date + import datetime with get_davclient() as client: my_principal = client.get_principal() @@ -256,6 +203,7 @@ The ``event.component`` property gives easy access to the rrule={'FREQ': 'YEARLY'}) my_events = my_new_calendar.search( + event=True, start=date(2026,5,1), end=date(2026,6,1), expand=True) @@ -266,9 +214,10 @@ The ``event.component`` property gives easy access to the cal.subcomponents[0]['summary'] = "Norwegian national day celebrations" my_events[0].save() -How to do operations on components in the icalendar library is outside the scope of this tutorial. +Tasks +----- -Usually tasks and journals can be applied directly to the same calendar as the events – but some implementations (notably Zimbra) have "task lists" and "calendars" as distinct entities. To create a task list, there is a parameter ``supported_calendar_component_set`` that can be set to ``['VTODO']``. Here is a quick example that features a task: +Create a task list and work with tasks: .. code-block:: python @@ -293,11 +242,16 @@ Usually tasks and journals can be applied directly to the same calendar as the e todo=True, include_completed=True) assert len(my_tasks) == 1 +Further Reading +--------------- -There are more functionality, but if you've followed the tutorial to this point, you should already know eough to deal with the very most use-cases. - -There are some more :ref:`examples:examples` in the examples folder, particularly `basic examples `_. There is also a `scheduling examples `_ for sending, receiving and replying to invites, though this is not very well-tested so far. The example code is currently not tested nor maintained. 
Some of it will be moved into the documentation as tutorials or how-tos eventually. +See the :ref:`examples:examples` folder for more code, including +`basic examples `_ +and `scheduling examples `_ +for invites. -The `test code `_ also covers most of the features available, though it's not much optimized for readability (at least not as of 2025-05). +The `test code `_ +covers most features. -Tobias Brox is also working on a `command line interface `_ built around the caldav library. +There is also a `command line interface `_ +built around the caldav library. diff --git a/pyproject.toml b/pyproject.toml index 8c27bbd5..fa67d602 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -160,6 +160,9 @@ filterwarnings = [ # Treat all warnings as errors by default "error", + # Allow missing _version.py warning during development/testing (generated by hatch-vcs on build/install) + "ignore:You need to install the `build` package:UserWarning", + # Ignore deprecation warnings from external libraries we can't control # https://github.com/jawah/niquests/pull/327 https://github.com/jawah/niquests/issues/326 "ignore:.*asyncio.iscoroutinefunction.*:DeprecationWarning:niquests", @@ -171,4 +174,10 @@ filterwarnings = [ # Radicale uses deprecated importlib.abc.Traversable (Python 3.14 removal) "ignore:.*importlib.abc.Traversable.*:DeprecationWarning", + + # Show conf_private.py deprecation warning once (not as error) during migration period + "once:conf_private.py is deprecated:DeprecationWarning", + + # Zimbra test server uses HTTPS with self-signed cert (ssl_verify_cert=False) + "ignore:Unverified HTTPS request:urllib3_future.exceptions.InsecureRequestWarning", ] diff --git a/tests/README.md b/tests/README.md index 8cfdac2b..aaff6325 100644 --- a/tests/README.md +++ b/tests/README.md @@ -39,8 +39,8 @@ pytest tests/test_caldav_unit.py Test configuration uses YAML or JSON files. The configuration loader searches these locations in order: -1. `tests/test_servers.yaml` -2. 
`tests/test_servers.json` +1. `tests/caldav_test_servers.yaml` +2. `tests/caldav_test_servers.json` 3. `~/.config/caldav/test_servers.yaml` 4. `~/.config/caldav/test_servers.json` @@ -48,10 +48,15 @@ these locations in order: 1. Copy the example configuration: ```bash - cp tests/test_servers.yaml.example tests/test_servers.yaml + cp tests/caldav_test_servers.yaml.example tests/caldav_test_servers.yaml ``` -2. Edit `test_servers.yaml` to enable/configure servers: +2. If you want to populate it with private passwords, remember to protect it: + ```sh + chmod og-r tests/caldav_test_servers.yaml + ``` + +3. Edit `caldav_test_servers.yaml` to enable/configure servers: ```yaml test-servers: radicale: @@ -99,7 +104,7 @@ If you have an existing `conf_private.py`, a migration script is provided: python tests/tools/convert_conf_private.py ``` -This generates a `test_servers.yaml` from your existing configuration. +This generates a `caldav_test_servers.yaml` from your existing configuration. The old `conf_private.py` format is deprecated and will be removed in v3.0. ## Test Server Types @@ -213,7 +218,7 @@ coverage html # Generate HTML report If you see warnings about no test servers being configured: -1. Set up `test_servers.yaml` with your server details, or +1. Set up `caldav_test_servers.yaml` with your server details, or 2. Install embedded servers: `pip install radicale xandikos`, or 3. 
Use the Docker test servers diff --git a/tests/_test_absolute.py b/tests/_test_absolute.py index 64cd26b3..f7d9f4a1 100644 --- a/tests/_test_absolute.py +++ b/tests/_test_absolute.py @@ -15,7 +15,7 @@ class TestRadicale: def setup(self): URL = "http://localhost:8080/nicoe/perso/" self.client = caldav.DAVClient(URL) - self.calendar = caldav.objects.Calendar(self.client, URL) + self.calendar = caldav.Calendar(self.client, URL) def test_eventslist(self): events = self.calendar.get_events() @@ -36,7 +36,7 @@ class TestTryton: def setup(self): URL = "http://admin:admin@localhost:9080/caldav/Calendars/Test" self.client = caldav.DAVClient(URL) - self.calendar = caldav.objects.Calendar(self.client, URL) + self.calendar = caldav.Calendar(self.client, URL) def test_eventslist(self): events = self.calendar.get_events() diff --git a/tests/caldav_test_servers.yaml.example b/tests/caldav_test_servers.yaml.example new file mode 100644 index 00000000..48fa1ccb --- /dev/null +++ b/tests/caldav_test_servers.yaml.example @@ -0,0 +1,145 @@ +# Test server configuration for caldav tests +# +# Copy this file to caldav_test_servers.yaml and customize for your setup. +# See tests/README.md for documentation. +# +# Environment variables can be used with ${VAR} or ${VAR:-default} syntax. 
+ +test-servers: + # ========================================================================= + # Embedded servers (run in-process, no external setup required) + # ========================================================================= + + radicale: + type: embedded + enabled: true + host: ${RADICALE_HOST:-localhost} + port: ${RADICALE_PORT:-5232} + username: user1 + password: "" + + xandikos: + type: embedded + enabled: true + host: ${XANDIKOS_HOST:-localhost} + port: ${XANDIKOS_PORT:-8993} + username: sometestuser + password: "" + + # ========================================================================= + # Docker servers (require docker-compose, see docker-test-servers/) + # ========================================================================= + # + # Set enabled to: + # - true: always enable + # - false: always disable + # - "auto": enable if docker is available (default for docker servers) + + baikal: + type: docker + enabled: ${TEST_BAIKAL:-auto} + host: ${BAIKAL_HOST:-localhost} + port: ${BAIKAL_PORT:-8800} + username: ${BAIKAL_USERNAME:-testuser} + password: ${BAIKAL_PASSWORD:-testpass} + # Path within the CalDAV server + # path: /dav.php + + nextcloud: + type: docker + enabled: ${TEST_NEXTCLOUD:-false} + host: ${NEXTCLOUD_HOST:-localhost} + port: ${NEXTCLOUD_PORT:-8801} + username: ${NEXTCLOUD_USERNAME:-testuser} + password: ${NEXTCLOUD_PASSWORD:-testpass} + + cyrus: + type: docker + enabled: ${TEST_CYRUS:-false} + host: ${CYRUS_HOST:-localhost} + port: ${CYRUS_PORT:-8802} + username: ${CYRUS_USERNAME:-testuser@test.local} + password: ${CYRUS_PASSWORD:-testpassword} + + sogo: + type: docker + enabled: ${TEST_SOGO:-false} + host: ${SOGO_HOST:-localhost} + port: ${SOGO_PORT:-8803} + username: ${SOGO_USERNAME:-testuser} + password: ${SOGO_PASSWORD:-testpassword} + + bedework: + type: docker + enabled: ${TEST_BEDEWORK:-false} + host: ${BEDEWORK_HOST:-localhost} + port: ${BEDEWORK_PORT:-8804} + username: ${BEDEWORK_USERNAME:-admin} + password: 
${BEDEWORK_PASSWORD:-bedework} + + davical: + type: docker + enabled: ${TEST_DAVICAL:-false} + host: ${DAVICAL_HOST:-localhost} + port: ${DAVICAL_PORT:-8805} + username: ${DAVICAL_USERNAME:-testuser} + password: ${DAVICAL_PASSWORD:-testpass} + + davis: + type: docker + enabled: ${TEST_DAVIS:-false} + host: ${DAVIS_HOST:-localhost} + port: ${DAVIS_PORT:-8806} + username: ${DAVIS_USERNAME:-testuser} + password: ${DAVIS_PASSWORD:-testpass} + + ccs: + type: docker + enabled: ${TEST_CCS:-false} + host: ${CCS_HOST:-localhost} + port: ${CCS_PORT:-8807} + username: ${CCS_USERNAME:-user01} + password: ${CCS_PASSWORD:-user01} + + zimbra: + type: docker + enabled: ${TEST_ZIMBRA:-false} + host: ${ZIMBRA_HOST:-zimbra-docker.zimbra.io} + port: ${ZIMBRA_PORT:-8808} + username: ${ZIMBRA_USERNAME:-testuser@zimbra.io} + password: ${ZIMBRA_PASSWORD:-testpass} + + # ========================================================================= + # External/private servers (your own CalDAV server) + # ========================================================================= + # + # Uncomment and configure to test against your own server: + + # my-server: + # type: external + # enabled: true + # url: ${CALDAV_URL:-https://caldav.example.com/dav/} + # username: ${CALDAV_USERNAME} + # password: ${CALDAV_PASSWORD} + # # Optional: SSL verification (default: true) + # ssl_verify: true + # # Optional: specify server limitations/features + # features: + # - no-expand # Server doesn't support EXPAND + # - no-sync-token # Server doesn't support sync tokens + # - no-freebusy # Server doesn't support freebusy queries + +# ========================================================================= +# RFC6638 scheduling test users (optional) +# ========================================================================= +# +# For testing calendar scheduling (meeting invites, etc.), define +# multiple users that can send invites to each other: + +# rfc6638_users: +# - url: https://caldav.example.com/dav/user1/ 
+# username: user1 +# password: pass1 +# - url: https://caldav.example.com/dav/user2/ +# username: user2 +# password: pass2 diff --git a/tests/docker-test-servers/baikal/README.md b/tests/docker-test-servers/baikal/README.md index 742286e0..104cbcd3 100644 --- a/tests/docker-test-servers/baikal/README.md +++ b/tests/docker-test-servers/baikal/README.md @@ -54,12 +54,16 @@ This Baikal instance comes **pre-configured** with: ## Disabling Baikal Tests -If you want to skip Baikal tests, create `tests/conf_private.py`: +If you want to skip Baikal tests, set `enabled: false` for the `baikal` entry in `tests/caldav_test_servers.yaml`: -```python -test_baikal = False +```yaml +baikal: + type: docker + enabled: false ``` +Or use the environment variable: `TEST_BAIKAL=false`. + Or simply don't install Docker - the tests will automatically skip Baikal if Docker is not available. ## GitHub Actions (CI/CD) @@ -93,21 +97,18 @@ You can add more secrets in GitHub Actions settings for credentials. ### Test Configuration -The test suite will automatically detect and use Baikal if configured. Configuration is in: -- `tests/conf_baikal.py` - Baikal-specific configuration -- `tests/conf.py` - Main test configuration (add Baikal to `caldav_servers` list) - -To enable Baikal testing, add to `tests/conf_private.py`: +The test suite will automatically detect and use Baikal if configured. Configuration is in `tests/caldav_test_servers.yaml` (copy from `tests/caldav_test_servers.yaml.example` and customize). -```python -from tests.conf_baikal import get_baikal_config +To enable Baikal testing, set `enabled: true` (or `enabled: auto` to auto-detect Docker availability) in the YAML config: -# Add Baikal to test servers if available -baikal_conf = get_baikal_config() -if baikal_conf: - caldav_servers.append(baikal_conf) +```yaml +baikal: + type: docker + enabled: true ``` +Or use the environment variable: `TEST_BAIKAL=true`. 
+ ## Troubleshooting ### Container won't start diff --git a/tests/docker-test-servers/ccs/README.md b/tests/docker-test-servers/ccs/README.md new file mode 100644 index 00000000..d2834149 --- /dev/null +++ b/tests/docker-test-servers/ccs/README.md @@ -0,0 +1,44 @@ +# Apple CalendarServer (CCS) Test Server + +Apple's CalendarServer (ccs-calendarserver) running in Docker for the caldav +library test suite. + +## Overview + +- **Image**: `pluies/ccs-calendarserver:latest` (Debian Jessie, Python 2) +- **Port**: 8807 (mapped to container's 8008) +- **Users**: user01/user01, user02/user02, admin/admin +- **Storage**: File-based (SQLite), ephemeral (tmpfs) + +Note: Apple CalendarServer is archived/orphaned since 2019. This Docker image +is based on Debian Jessie and Python 2. It's included for historical +compatibility testing. + +## Quick Start + +```bash +./start.sh +``` + +## Stop + +```bash +./stop.sh +``` + +## Configuration + +- `conf/caldavd.plist` - Main server config (HTTP on port 8008, no SSL) +- `conf/auth/accounts.xml` - User accounts (XML-based directory service) +- `conf/auth/augments.xml` - Calendar/addressbook enablement +- `conf/auth/resources.xml` - Resources (empty) +- `conf/auth/proxies.xml` - Proxy delegates (empty) + +## Architecture + +CCS uses UID-based principal URLs: +- Principal: `/principals/__uids__/{GUID}/` +- Calendar home: `/calendars/__uids__/{GUID}/` + +The server auto-initializes its data directory on first start. No setup script +is needed — users are defined in `accounts.xml`. 
diff --git a/tests/docker-test-servers/ccs/conf/auth/accounts.xml b/tests/docker-test-servers/ccs/conf/auth/accounts.xml new file mode 100644 index 00000000..6b4c8efa --- /dev/null +++ b/tests/docker-test-servers/ccs/conf/auth/accounts.xml @@ -0,0 +1,33 @@ + + + + + + + 0C8BDE62-E600-4696-83D3-8B5ECABDFD2E + 0C8BDE62-E600-4696-83D3-8B5ECABDFD2E + admin + admin + Super User + admin@example.com + + + 10000000-0000-0000-0000-000000000001 + 10000000-0000-0000-0000-000000000001 + user01 + user01 + User 01 + user01@example.com + + + 10000000-0000-0000-0000-000000000002 + 10000000-0000-0000-0000-000000000002 + user02 + user02 + User 02 + user02@example.com + + diff --git a/tests/docker-test-servers/ccs/conf/auth/augments.xml b/tests/docker-test-servers/ccs/conf/auth/augments.xml new file mode 100644 index 00000000..6ac5f9a9 --- /dev/null +++ b/tests/docker-test-servers/ccs/conf/auth/augments.xml @@ -0,0 +1,10 @@ + + + + + + Default + true + true + + diff --git a/tests/docker-test-servers/ccs/conf/auth/proxies.xml b/tests/docker-test-servers/ccs/conf/auth/proxies.xml new file mode 100644 index 00000000..945bd89e --- /dev/null +++ b/tests/docker-test-servers/ccs/conf/auth/proxies.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/tests/docker-test-servers/ccs/conf/auth/resources.xml b/tests/docker-test-servers/ccs/conf/auth/resources.xml new file mode 100644 index 00000000..b69dedb7 --- /dev/null +++ b/tests/docker-test-servers/ccs/conf/auth/resources.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/tests/docker-test-servers/ccs/conf/caldavd.plist b/tests/docker-test-servers/ccs/conf/caldavd.plist new file mode 100644 index 00000000..bcd9c8fc --- /dev/null +++ b/tests/docker-test-servers/ccs/conf/caldavd.plist @@ -0,0 +1,272 @@ + + + + + + + ServerHostName + localhost + + EnableCalDAV + + + EnableCardDAV + + + SocketFiles + + Enabled + + + + HTTPPort + 8008 + + SSLPort + 0 + + EnableSSL + + + RedirectHTTPToHTTPS + + + BindHTTPPorts + + 8008 + + + ServerRoot + ./data + + DataRoot + Data + 
+ DatabaseRoot + Database + + DocumentRoot + Documents + + ConfigRoot + ./conf + + RunRoot + Logs/state + + FailIfUpgradeNeeded + + + UserQuota + 104857600 + + MaxCollectionsPerHome + 50 + + MaxResourcesPerCollection + 10000 + + MaxResourceSize + 1048576 + + DirectoryProxy + + Enabled + + + + + DirectoryService + + type + xml + params + + xmlFile + ./conf/auth/accounts.xml + + + + DirectoryRealmName + Test Realm + + ResourceService + + Enabled + + type + xml + params + + xmlFile + ./conf/auth/resources.xml + + + + AugmentService + + type + xml + params + + xmlFiles + + ./conf/auth/augments.xml + + + + + ProxyLoadFromFile + ./conf/auth/proxies.xml + + AdminPrincipals + + /principals/__uids__/0C8BDE62-E600-4696-83D3-8B5ECABDFD2E/ + + + EnableProxyPrincipals + + + EnableAnonymousReadRoot + + + EnablePrincipalListings + + + Authentication + + Basic + + Enabled + + AllowedOverWireUnencrypted + + + Digest + + Enabled + + + Kerberos + + Enabled + + + Wiki + + Enabled + + + + + LogRoot + Logs + + DefaultLogLevel + info + + ProcessType + Combined + + MultiProcess + + ProcessCount + 0 + + + Notifications + + Services + + AMP + + Enabled + + + + + + Scheduling + + CalDAV + + OldDraftCompatibility + + ScheduleTagCompatibility + + EnablePrivateComments + + + iSchedule + + Enabled + + + iMIP + + Enabled + + + + + FreeBusyURL + + Enabled + + TimePeriod + 14 + AnonymousAccess + + + + EnableTimezoneService + + + UsePackageTimezones + + + Sharing + + Enabled + + Calendars + + Enabled + + + + + EnableTrashCollection + + + ResponseCompression + + + Memcached + + MaxClients + 5 + memcached + memcached + + + EnableResponseCache + + + ResponseCacheTimeout + 30 + + EnableSearchAddressBook + + + DirectoryCaching + + CachingSeconds + 10 + + + + diff --git a/tests/docker-test-servers/ccs/docker-compose.yml b/tests/docker-test-servers/ccs/docker-compose.yml new file mode 100644 index 00000000..d9bcd071 --- /dev/null +++ b/tests/docker-test-servers/ccs/docker-compose.yml @@ -0,0 +1,17 @@ 
+services: + ccs: + image: pluies/ccs-calendarserver:latest + container_name: ccs-test + ports: + - "8807:8008" + volumes: + - ./conf:/opt/ccs/conf:ro + command: ./bin/run -n -c /opt/ccs/conf/caldavd.plist + tmpfs: + - /opt/ccs/data:size=200m + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8008/"] + interval: 10s + timeout: 5s + retries: 10 + start_period: 30s diff --git a/tests/docker-test-servers/ccs/start.sh b/tests/docker-test-servers/ccs/start.sh new file mode 100755 index 00000000..02ea1447 --- /dev/null +++ b/tests/docker-test-servers/ccs/start.sh @@ -0,0 +1,48 @@ +#!/bin/bash +# Quick start script for Apple CalendarServer (CCS) test server +# +# Usage: ./start.sh + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Creating and starting Apple CalendarServer container..." +docker-compose up -d + +echo "Waiting for CCS to be healthy..." +for i in $(seq 1 60); do + if docker inspect --format='{{.State.Health.Status}}' ccs-test 2>/dev/null | grep -q healthy; then + echo "CCS is healthy!" + break + fi + if [ "$i" -eq 60 ]; then + echo "Timeout waiting for CCS to become healthy" + echo "Container logs:" + docker-compose logs ccs + exit 1 + fi + sleep 2 +done + +# Verify CalDAV is responding +echo "Verifying CalDAV endpoint..." +if curl -s -o /dev/null -w "%{http_code}" -u user01:user01 -X PROPFIND -H "Depth: 0" http://localhost:8807/ | grep -q "207"; then + echo "CalDAV is responding correctly" +else + echo "Warning: CalDAV endpoint not responding as expected" + echo "Container logs:" + docker-compose logs ccs +fi + +echo "" +echo "CCS is running on http://localhost:8807/" +echo " Users: user01/user01, user02/user02, admin/admin" +echo "" +echo "Run tests from project root:" +echo " cd ../../.." 
+echo " TEST_CCS=true pytest" +echo "" +echo "To stop CCS: ./stop.sh" +echo "To view logs: docker-compose logs -f ccs" diff --git a/tests/docker-test-servers/ccs/stop.sh b/tests/docker-test-servers/ccs/stop.sh new file mode 100755 index 00000000..20673a59 --- /dev/null +++ b/tests/docker-test-servers/ccs/stop.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Stop script for Apple CalendarServer (CCS) test server + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Stopping CCS and removing volumes..." +docker-compose down -v + +echo "CCS stopped and volumes removed" diff --git a/tests/docker-test-servers/cyrus/README.md b/tests/docker-test-servers/cyrus/README.md index 85f0a483..9c4dfdd2 100644 --- a/tests/docker-test-servers/cyrus/README.md +++ b/tests/docker-test-servers/cyrus/README.md @@ -59,12 +59,16 @@ This Cyrus instance comes **pre-configured** with: ## Disabling Cyrus Tests -If you want to skip Cyrus tests, create `tests/conf_private.py`: +If you want to skip Cyrus tests, set `enabled: false` for the `cyrus` entry in `tests/caldav_test_servers.yaml`: -```python -test_cyrus = False +```yaml +cyrus: + type: docker + enabled: false ``` +Or use the environment variable: `TEST_CYRUS=false`. + Or simply don't install Docker - the tests will automatically skip Cyrus if Docker is not available. 
## Troubleshooting diff --git a/tests/docker-test-servers/cyrus/docker-compose.yml b/tests/docker-test-servers/cyrus/docker-compose.yml index 828ba81c..5e0aa3ad 100644 --- a/tests/docker-test-servers/cyrus/docker-compose.yml +++ b/tests/docker-test-servers/cyrus/docker-compose.yml @@ -7,7 +7,7 @@ services: ports: - "8802:8080" # HTTP (CalDAV/CardDAV/JMAP) - "8143:8143" # IMAP - - "8001:8001" # Management API + - "18001:8001" # Management API environment: - DEFAULTDOMAIN=example.com - SERVERNAME=cyrus-test diff --git a/tests/docker-test-servers/davical/README.md b/tests/docker-test-servers/davical/README.md index 5be00f93..51d011b3 100644 --- a/tests/docker-test-servers/davical/README.md +++ b/tests/docker-test-servers/davical/README.md @@ -1,36 +1,30 @@ # DAViCal Test Server -DAViCal is a CalDAV server that uses PostgreSQL as its backend. This Docker configuration provides a complete DAViCal server for testing. +[DAViCal](https://www.davical.org/) is a CalDAV server that uses PostgreSQL as its backend. This Docker configuration provides a complete DAViCal server for testing. ## Quick Start ```bash cd tests/docker-test-servers/davical -docker-compose up -d +./start.sh ``` -Wait about 30 seconds for the database to initialize, then the server will be available. +This will: +1. Start PostgreSQL and DAViCal containers +2. Wait for database initialization (~60s) +3. Create a test user with CalDAV access +4. Verify connectivity ## Configuration -- **URL**: http://localhost:8805/davical/caldav.php -- **Admin User**: admin -- **Admin Password**: testpass (set via DAVICAL_ADMIN_PASS) - -## Creating Test Users - -After the server starts, you can create test users via the admin interface: - -1. Navigate to http://localhost:8805/davical/admin.php -2. Login with admin / testpass -3. Create a new user (e.g., testuser / testpass) - -Alternatively, the container may pre-create a test user depending on the image configuration. 
+- **URL**: http://localhost:8805/caldav.php/ +- **Admin**: admin / testpass +- **Test User**: testuser / testpass ## CalDAV Endpoints -- **Principal URL**: `http://localhost:8805/davical/caldav.php/{username}/` -- **Calendar Home**: `http://localhost:8805/davical/caldav.php/{username}/calendar/` +- **Principal URL**: `http://localhost:8805/caldav.php/{username}/` +- **Calendar Home**: `http://localhost:8805/caldav.php/{username}/calendar/` (auto-created on first MKCALENDAR) ## Environment Variables @@ -38,12 +32,20 @@ Alternatively, the container may pre-create a test user depending on the image c |----------|---------|-------------| | `DAVICAL_HOST` | localhost | Server hostname | | `DAVICAL_PORT` | 8805 | HTTP port | -| `DAVICAL_USERNAME` | admin | Test username | +| `DAVICAL_USERNAME` | testuser | Test username | | `DAVICAL_PASSWORD` | testpass | Test password | ## Docker Image -This configuration uses the [tuxnvape/davical-standalone](https://hub.docker.com/r/tuxnvape/davical-standalone) Docker image, which provides a complete DAViCal installation with PostgreSQL. +Uses [tuxnvape/davical-standalone](https://hub.docker.com/r/tuxnvape/davical-standalone) with a separate [postgres:16-alpine](https://hub.docker.com/_/postgres) container. Despite the image name, it requires an external PostgreSQL service. + +## User Setup Details + +DAViCal stores users in PostgreSQL: +- `usr` table: `(username, password, fullname, email)` — passwords are prefixed with `**` +- `principal` table: `(type_id, user_no, displayname)` — type_id 1 = Person + +The `setup_davical.sh` script handles user creation automatically. ## Troubleshooting @@ -53,13 +55,13 @@ Check if port 8805 is already in use: lsof -i :8805 ``` -### Database initialization -The first startup may take 30+ seconds while PostgreSQL initializes. Check logs: +### Database initialization takes long +The first startup takes ~60s for PostgreSQL initialization plus DAViCal schema setup. 
Check logs: ```bash docker-compose logs -f ``` ### Testing connectivity ```bash -curl -u admin:testpass http://localhost:8805/davical/caldav.php/admin/ +curl -X PROPFIND -H "Depth: 0" -u testuser:testpass http://localhost:8805/caldav.php/testuser/ ``` diff --git a/tests/docker-test-servers/davical/docker-compose.yml b/tests/docker-test-servers/davical/docker-compose.yml index 50a009e6..731b2cf0 100644 --- a/tests/docker-test-servers/davical/docker-compose.yml +++ b/tests/docker-test-servers/davical/docker-compose.yml @@ -1,18 +1,35 @@ services: + db: + image: postgres:16-alpine + container_name: davical-db + environment: + - POSTGRES_PASSWORD=davical + tmpfs: + - /var/lib/postgresql/data:size=500m + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 3s + retries: 10 + start_period: 10s + davical: image: tuxnvape/davical-standalone:latest container_name: davical-test ports: - "8805:80" environment: - - POSTGRES_PASSWORD=davical - - DAVICAL_ADMIN_PASS=testpass - tmpfs: - # Make the container ephemeral - data is lost on restart - - /var/lib/postgresql/data:size=500m + - DBHOST=db + - PASSDAVDB=davical + - PGSQL_ROOT_PASS=davical + - ADMINDAVICALPASS=testpass + - TIME_ZONE=UTC + depends_on: + db: + condition: service_healthy healthcheck: test: ["CMD", "curl", "-f", "http://localhost/davical/"] interval: 10s timeout: 5s - retries: 5 - start_period: 30s + retries: 10 + start_period: 45s diff --git a/tests/docker-test-servers/davical/setup_davical.sh b/tests/docker-test-servers/davical/setup_davical.sh new file mode 100755 index 00000000..20cfd6b1 --- /dev/null +++ b/tests/docker-test-servers/davical/setup_davical.sh @@ -0,0 +1,82 @@ +#!/bin/bash +# Setup script for DAViCal test server +# Creates a test user with CalDAV access via PostgreSQL. +# +# DAViCal stores users in the 'usr' table with passwords prefixed by '**'. +# Each user needs a corresponding entry in the 'principal' table. 
+
+set -e
+
+DB_CONTAINER="davical-db"
+DAVICAL_CONTAINER="davical-test"
+DB_USER="davical_dba"
+DB_NAME="davical"
+TEST_USER="testuser"
+TEST_PASSWORD="testpass"
+
+run_sql() {
+    # NOTE: stderr is deliberately NOT captured here — the existence checks
+    # below test whether the output is empty, and a psql error message on
+    # stdout would be misread as "row already exists".
+    docker exec "$DB_CONTAINER" psql -U "$DB_USER" "$DB_NAME" -tAc "$1"
+}
+
+echo "Waiting for DAViCal to be accessible..."
+max_attempts=60
+for i in $(seq 1 $max_attempts); do
+    if curl -s -o /dev/null -w "%{http_code}" "http://localhost:8805/caldav.php/" 2>/dev/null | grep -q "401"; then
+        echo "DAViCal HTTP server is ready"
+        break
+    fi
+    if [ $i -eq $max_attempts ]; then
+        echo "DAViCal did not become ready in time"
+        echo "Check logs with: docker-compose logs davical"
+        exit 1
+    fi
+    echo -n "."
+    sleep 3
+done
+
+echo ""
+echo "Creating test user..."
+# Check if user already exists
+EXISTING=$(run_sql "SELECT username FROM usr WHERE username='${TEST_USER}'")
+if [ -n "$EXISTING" ]; then
+    echo "User '${TEST_USER}' already exists, skipping creation"
+else
+    run_sql "INSERT INTO usr (username, password, fullname, email) VALUES ('${TEST_USER}', '**${TEST_PASSWORD}', 'Test User', '${TEST_USER}@example.com')"
+    echo "User created"
+fi
+
+echo "Creating principal entry..."
+EXISTING_PRINCIPAL=$(run_sql "SELECT principal_id FROM principal p JOIN usr u ON p.user_no = u.user_no WHERE u.username='${TEST_USER}'")
+if [ -n "$EXISTING_PRINCIPAL" ]; then
+    echo "Principal already exists, skipping"
+else
+    run_sql "INSERT INTO principal (type_id, user_no, displayname) SELECT 1, user_no, fullname FROM usr WHERE username='${TEST_USER}'"
+    echo "Principal created"
+fi
+
+echo ""
+echo "Verifying CalDAV access..."
+max_caldav_attempts=10 +for i in $(seq 1 $max_caldav_attempts); do + RESPONSE=$(curl -s -X PROPFIND -H "Depth: 0" -u "${TEST_USER}:${TEST_PASSWORD}" "http://localhost:8805/caldav.php/${TEST_USER}/" 2>/dev/null) + if echo "$RESPONSE" | grep -qi "multistatus\|collection"; then + echo "CalDAV is accessible" + break + fi + if [ $i -eq $max_caldav_attempts ]; then + echo "Warning: CalDAV access test failed after ${max_caldav_attempts} attempts" + echo "Response: $RESPONSE" + echo "Continuing anyway..." + break + fi + echo -n "." + sleep 2 +done + +echo "" +echo "DAViCal setup complete!" +echo "" +echo "Credentials:" +echo " Admin: admin / testpass" +echo " Test user: ${TEST_USER} / ${TEST_PASSWORD}" +echo " CalDAV URL: http://localhost:8805/caldav.php/${TEST_USER}/" diff --git a/tests/docker-test-servers/davical/start.sh b/tests/docker-test-servers/davical/start.sh new file mode 100755 index 00000000..5b9edb54 --- /dev/null +++ b/tests/docker-test-servers/davical/start.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Quick start script for DAViCal test server +# +# Usage: ./start.sh + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Creating and starting DAViCal containers (PostgreSQL + DAViCal)..." +docker-compose up -d + +echo "Waiting for DAViCal to be healthy (DB init takes ~60s)..." +timeout 180 bash -c 'until docker inspect --format="{{.State.Health.Status}}" davical-test 2>/dev/null | grep -q "healthy"; do echo -n "."; sleep 5; done' || { + echo "" + echo "Error: DAViCal did not become healthy in time" + echo "Check logs with: docker-compose logs" + exit 1 +} +echo "" + +echo "Running setup..." +bash "$SCRIPT_DIR/setup_davical.sh" + +echo "" +echo "Run tests from project root:" +echo " cd ../../.." 
+echo " TEST_DAVICAL=true pytest" +echo "" +echo "To stop DAViCal: ./stop.sh" +echo "To view logs: docker-compose logs -f" diff --git a/tests/docker-test-servers/davical/stop.sh b/tests/docker-test-servers/davical/stop.sh new file mode 100755 index 00000000..7d619717 --- /dev/null +++ b/tests/docker-test-servers/davical/stop.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Stop script for DAViCal test server + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Stopping DAViCal and removing volumes..." +docker-compose down -v + +echo "DAViCal stopped and volumes removed" diff --git a/tests/docker-test-servers/davis/README.md b/tests/docker-test-servers/davis/README.md new file mode 100644 index 00000000..644b7db3 --- /dev/null +++ b/tests/docker-test-servers/davis/README.md @@ -0,0 +1,70 @@ +# Davis Test Server + +[Davis](https://github.com/tchapi/davis) is a modern admin interface and standalone server for sabre/dav (Symfony 7). This Docker configuration uses the standalone image which bundles PHP-FPM + Caddy with a SQLite backend. + +## Quick Start + +```bash +cd tests/docker-test-servers/davis +./start.sh +``` + +This will: +1. Start the Davis container +2. Wait for it to be healthy +3. Run database migrations +4. Create a test user with CalDAV access +5. 
Verify connectivity + +## Configuration + +- **URL**: http://localhost:8806/dav/ +- **Admin**: admin / admin +- **Test User**: testuser / testpass + +## CalDAV Endpoints + +- **DAV root**: `http://localhost:8806/dav/` +- **Principal URL**: `http://localhost:8806/dav/principals/testuser/` +- **Calendar Home**: `http://localhost:8806/dav/calendars/testuser/` + +## Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `DAVIS_HOST` | localhost | Server hostname | +| `DAVIS_PORT` | 8806 | HTTP port | +| `DAVIS_USERNAME` | testuser | Test username | +| `DAVIS_PASSWORD` | testpass | Test password | + +## Docker Image + +Uses [ghcr.io/tchapi/davis-standalone](https://github.com/tchapi/davis) — a single container with PHP-FPM, Caddy, and SQLite. + +## User Setup Details + +Davis uses sabre/dav which requires entries in multiple database tables: +- `users` table: `(username, digesta1)` where digesta1 = `md5("username:realm:password")` +- `principals` table: `(uri, email, displayname)` where uri = `principals/username` +- Calendar-proxy principals for delegation support + +The `setup_davis.sh` script handles all of this automatically. 
+ +## Troubleshooting + +### Container won't start +Check if port 8806 is already in use: +```bash +lsof -i :8806 +``` + +### Database issues +Check container logs: +```bash +docker-compose logs -f +``` + +### Testing connectivity +```bash +curl -X PROPFIND -H "Depth: 0" -u testuser:testpass http://localhost:8806/dav/ +``` diff --git a/tests/docker-test-servers/davis/docker-compose.yml b/tests/docker-test-servers/davis/docker-compose.yml new file mode 100644 index 00000000..f5c0fd1b --- /dev/null +++ b/tests/docker-test-servers/davis/docker-compose.yml @@ -0,0 +1,27 @@ +services: + davis: + image: ghcr.io/tchapi/davis-standalone:latest + container_name: davis-test + ports: + - "8806:9000" + environment: + - APP_ENV=prod + - DATABASE_DRIVER=sqlite + - DATABASE_URL=sqlite:////data/davis-database.db + - ADMIN_LOGIN=admin + - ADMIN_PASSWORD=admin + - CALDAV_ENABLED=true + - CARDDAV_ENABLED=true + - WEBDAV_ENABLED=false + - AUTH_REALM=SabreDAV + - AUTH_METHOD=Basic + - APP_SECRET=testserversecret123 + - INVITE_FROM_ADDRESS=test@example.com + tmpfs: + - /data:size=100m + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/dav/"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s diff --git a/tests/docker-test-servers/davis/setup_davis.sh b/tests/docker-test-servers/davis/setup_davis.sh new file mode 100755 index 00000000..009bf6e9 --- /dev/null +++ b/tests/docker-test-servers/davis/setup_davis.sh @@ -0,0 +1,91 @@ +#!/bin/bash +# Setup script for Davis test server +# Creates a test user with CalDAV access. +# +# Davis uses sabre/dav which requires entries in both the 'users' and +# 'principals' tables. The digesta1 field is md5(username:realm:password). +# +# Since the container has no sqlite3 CLI, we use Symfony's dbal:run-sql +# and doctrine:migrations:migrate console commands instead. 
+ +set -e + +CONTAINER_NAME="davis-test" +TEST_USER="testuser" +TEST_PASSWORD="testpass" +AUTH_REALM="SabreDAV" +CONSOLE="php /var/www/davis/bin/console" + +run_sql() { + docker exec "$CONTAINER_NAME" $CONSOLE dbal:run-sql "$1" 2>&1 +} + +echo "Waiting for Davis container to be running..." +max_attempts=30 +for i in $(seq 1 $max_attempts); do + if docker exec "$CONTAINER_NAME" true 2>/dev/null; then + echo "Container is running" + break + fi + if [ $i -eq $max_attempts ]; then + echo "Container did not start in time" + exit 1 + fi + echo -n "." + sleep 2 +done + +echo "" +echo "Initializing SQLite database..." +# Create the DB file and ensure it's writable by all processes in the container +docker exec "$CONTAINER_NAME" touch /data/davis-database.db +docker exec "$CONTAINER_NAME" chmod 666 /data/davis-database.db +docker exec "$CONTAINER_NAME" chmod 777 /data/ + +echo "Running database migrations..." +docker exec "$CONTAINER_NAME" $CONSOLE doctrine:migrations:migrate --no-interaction 2>&1 + +echo "" +echo "Computing digest hash..." +DIGEST=$(echo -n "${TEST_USER}:${AUTH_REALM}:${TEST_PASSWORD}" | md5sum | awk '{print $1}') +echo "Digest: ${DIGEST}" + +echo "" +echo "Creating test user in database..." +run_sql "INSERT INTO users (username, digesta1) VALUES ('${TEST_USER}', '${DIGEST}')" + +echo "Creating principal entries..." 
+# sabre/dav requires principal entries for CalDAV to work +# The principals table has is_main and is_admin boolean columns +run_sql "INSERT INTO principals (uri, email, displayname, is_main, is_admin) VALUES ('principals/${TEST_USER}', '${TEST_USER}@example.com', 'Test User', 1, 0)" + +# Calendar-proxy principals that sabre/dav expects for delegation +run_sql "INSERT INTO principals (uri, email, displayname, is_main, is_admin) VALUES ('principals/${TEST_USER}/calendar-proxy-read', NULL, NULL, 0, 0)" +run_sql "INSERT INTO principals (uri, email, displayname, is_main, is_admin) VALUES ('principals/${TEST_USER}/calendar-proxy-write', NULL, NULL, 0, 0)" + +echo "" +echo "Verifying CalDAV access..." +max_caldav_attempts=15 +for i in $(seq 1 $max_caldav_attempts); do + RESPONSE=$(curl -s -X PROPFIND -H "Depth: 0" -u "${TEST_USER}:${TEST_PASSWORD}" http://localhost:8806/dav/ 2>/dev/null) + if echo "$RESPONSE" | grep -qi "multistatus\|collection"; then + echo "CalDAV is accessible" + break + fi + if [ $i -eq $max_caldav_attempts ]; then + echo "Warning: CalDAV access test failed after ${max_caldav_attempts} attempts" + echo "Response: $RESPONSE" + echo "Continuing anyway - the user may need to debug..." + break + fi + echo -n "." + sleep 2 +done + +echo "" +echo "Davis setup complete!" +echo "" +echo "Credentials:" +echo " Admin: admin / admin" +echo " Test user: ${TEST_USER} / ${TEST_PASSWORD}" +echo " CalDAV URL: http://localhost:8806/dav/" diff --git a/tests/docker-test-servers/davis/start.sh b/tests/docker-test-servers/davis/start.sh new file mode 100755 index 00000000..3e0235b0 --- /dev/null +++ b/tests/docker-test-servers/davis/start.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# Quick start script for Davis test server +# +# Usage: ./start.sh + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Creating and starting Davis container..." 
+docker-compose up -d + +# The DB doesn't exist yet on a fresh tmpfs, so the healthcheck will fail +# until setup_davis.sh creates and migrates it. We just wait for the +# container processes to be running, then run setup. +echo "Waiting for container to be running..." +sleep 3 + +echo "Running setup (creates DB, runs migrations, creates test user)..." +bash "$SCRIPT_DIR/setup_davis.sh" + +echo "" +echo "Run tests from project root:" +echo " cd ../../.." +echo " TEST_DAVIS=true pytest" +echo "" +echo "To stop Davis: ./stop.sh" +echo "To view logs: docker-compose logs -f davis" diff --git a/tests/docker-test-servers/davis/stop.sh b/tests/docker-test-servers/davis/stop.sh new file mode 100755 index 00000000..36ae45da --- /dev/null +++ b/tests/docker-test-servers/davis/stop.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Stop script for Davis test server + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Stopping Davis and removing volumes..." +docker-compose down -v + +echo "Davis stopped and volumes removed" diff --git a/tests/docker-test-servers/nextcloud/README.md b/tests/docker-test-servers/nextcloud/README.md index 023a16aa..44e696b8 100644 --- a/tests/docker-test-servers/nextcloud/README.md +++ b/tests/docker-test-servers/nextcloud/README.md @@ -60,12 +60,16 @@ This Nextcloud instance comes **pre-configured** with: ## Disabling Nextcloud Tests -If you want to skip Nextcloud tests, create `tests/conf_private.py`: +If you want to skip Nextcloud tests, set `enabled: false` for the `nextcloud` entry in `tests/caldav_test_servers.yaml`: -```python -test_nextcloud = False +```yaml +nextcloud: + type: docker + enabled: false ``` +Or use the environment variable: `TEST_NEXTCLOUD=false`. + Or simply don't install Docker - the tests will automatically skip Nextcloud if Docker is not available. 
## Troubleshooting diff --git a/tests/docker-test-servers/sogo/README.md b/tests/docker-test-servers/sogo/README.md index 0940b933..a9fce51e 100644 --- a/tests/docker-test-servers/sogo/README.md +++ b/tests/docker-test-servers/sogo/README.md @@ -58,12 +58,16 @@ This SOGo instance comes **pre-configured** with: ## Disabling SOGo Tests -If you want to skip SOGo tests, create `tests/conf_private.py`: +If you want to skip SOGo tests, set `enabled: false` for the `sogo` entry in `tests/caldav_test_servers.yaml`: -```python -test_sogo = False +```yaml +sogo: + type: docker + enabled: false ``` +Or use the environment variable: `TEST_SOGO=false`. + Or simply don't install Docker - the tests will automatically skip SOGo if Docker is not available. ## Troubleshooting diff --git a/tests/docker-test-servers/zimbra/README.md b/tests/docker-test-servers/zimbra/README.md new file mode 100644 index 00000000..05e34cfc --- /dev/null +++ b/tests/docker-test-servers/zimbra/README.md @@ -0,0 +1,59 @@ +# Zimbra CalDAV Test Server + +Zimbra Collaboration Suite running in Docker for the caldav library test suite. + +## Overview + +- **Image**: `zimbra/zcs-foss:latest` (Zimbra 8.8.3 pre-installed) +- **Port**: 8808 (mapped to container's 443/HTTPS) +- **Users**: testuser@zimbra.io / testpass, testuser2@zimbra.io / testpass +- **Protocol**: HTTPS with self-signed certificate (`ssl_verify_cert=False`) + +## Resource Requirements + +Zimbra is heavyweight compared to other test servers: + +- **RAM**: ~6GB minimum +- **Disk**: ~3GB for image +- **First startup**: ~5-10 minutes (Zimbra configuration via zmsetup.pl) +- **Subsequent startups**: ~2-3 minutes (services restart only) + +The container runs in **privileged mode** (required for dnsmasq and service +management). This server is treated as "on-demand" and is not started +automatically. + +## Quick Start + +```bash +./start.sh +``` + +The start script will: +1. Add `zimbra-docker.zimbra.io` to `/etc/hosts` (requires sudo) +2. 
Start the container +3. Wait for Zimbra setup to complete +4. Create test users via `zmprov` +5. Verify CalDAV endpoint accessibility + +## Stop + +```bash +./stop.sh +``` + +## Running Tests + +```bash +cd ../../.. +TEST_ZIMBRA=true pytest tests/test_caldav.py -k Zimbra -v +``` + +## Notes + +- The container hostname must be `zimbra-docker.zimbra.io` — Zimbra's nginx + proxy rejects requests with non-matching Host headers. +- A `/etc/hosts` entry mapping `zimbra-docker.zimbra.io` to `127.0.0.1` is + required. The start script adds this automatically. +- The container uses HTTPS with a self-signed certificate. The test server + class sets `ssl_verify_cert=False` to handle this. +- Usernames are in email format: `testuser@zimbra.io` diff --git a/tests/docker-test-servers/zimbra/docker-compose.yml b/tests/docker-test-servers/zimbra/docker-compose.yml new file mode 100644 index 00000000..6f5a41bb --- /dev/null +++ b/tests/docker-test-servers/zimbra/docker-compose.yml @@ -0,0 +1,26 @@ +services: + zimbra: + image: zimbra/zcs-foss:latest + container_name: zimbra-test + hostname: zimbra-docker + dns: + - 127.0.0.1 + - 8.8.8.8 + ports: + - "8808:443" + privileged: true + command: /zimbra/init + environment: + - ZIMBRA_HOST_NAME=zimbra-docker.zimbra.io + - ZIMBRA_DEFAULT_DOMAIN=zimbra.io + - TIME_ZONE_ID=UTC + - TZDATA_AREA=Etc + - TZDATA_ZONE=UTC + - ADMIN_PW=testpass123 + - LDAP_ADMIN_PW=testpass123 + - LDAP_AMAVIS_PW=testpass123 + - LDAP_POSTFIX_PW=testpass123 + - LDAP_REPLICATION_PW=testpass123 + - LDAP_ROOT_PW=testpass123 + - LDAP_BES_PW=testpass123 + - LDAP_NGINX_PW=testpass123 diff --git a/tests/docker-test-servers/zimbra/start.sh b/tests/docker-test-servers/zimbra/start.sh new file mode 100755 index 00000000..014809ed --- /dev/null +++ b/tests/docker-test-servers/zimbra/start.sh @@ -0,0 +1,77 @@ +#!/bin/bash +# Quick start script for Zimbra CalDAV test server +# +# WARNING: First run takes ~5-10 minutes (Zimbra configuration). 
+# The container requires ~6GB of RAM and runs in privileged mode. +# +# Usage: ./start.sh + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +ZIMBRA_FQDN="zimbra-docker.zimbra.io" +ZIMBRA_DOMAIN="zimbra.io" + +# Ensure hostname resolves to localhost (Zimbra nginx requires matching Host header) +if ! grep -q "$ZIMBRA_FQDN" /etc/hosts 2>/dev/null; then + echo "Adding $ZIMBRA_FQDN to /etc/hosts (requires sudo)..." + echo "127.0.0.1 $ZIMBRA_FQDN" | sudo tee -a /etc/hosts > /dev/null +fi + +echo "Creating and starting Zimbra container..." +docker-compose up -d + +echo "Waiting for Zimbra setup to complete (this may take up to 15 minutes on first run)..." +for i in $(seq 1 180); do + if docker logs zimbra-test 2>&1 | grep -q "SETUP COMPLETE"; then + echo "Zimbra setup is complete!" + break + fi + if ! docker ps -q -f name=zimbra-test | grep -q .; then + echo "ERROR: Zimbra container stopped unexpectedly" + echo "Container logs (last 30 lines):" + docker-compose logs --tail=30 zimbra + exit 1 + fi + if [ "$i" -eq 180 ]; then + echo "Timeout waiting for Zimbra setup" + echo "Container logs (last 50 lines):" + docker-compose logs --tail=50 zimbra + exit 1 + fi + sleep 5 +done + +# Wait a bit for all services to stabilize +sleep 10 + +# Create test users (ignore errors if they already exist) +echo "Creating test users..." +docker exec zimbra-test su - zimbra -c "zmprov ca testuser@$ZIMBRA_DOMAIN testpass" 2>/dev/null || \ + echo " testuser already exists (or creation failed)" +docker exec zimbra-test su - zimbra -c "zmprov ca testuser2@$ZIMBRA_DOMAIN testpass" 2>/dev/null || \ + echo " testuser2 already exists (or creation failed)" + +# Verify CalDAV is responding +echo "Verifying CalDAV endpoint..." 
+if curl -sk -o /dev/null -w "%{http_code}" -u "testuser@$ZIMBRA_DOMAIN:testpass" "https://$ZIMBRA_FQDN:8808/dav/" | grep -qE "200|207|301|302|401"; then + echo "CalDAV is responding" +else + echo "Warning: CalDAV endpoint not responding as expected" + echo "Container logs (last 20 lines):" + docker-compose logs --tail=20 zimbra +fi + +echo "" +echo "Zimbra is running on https://$ZIMBRA_FQDN:8808/" +echo " Users: testuser@$ZIMBRA_DOMAIN / testpass" +echo " testuser2@$ZIMBRA_DOMAIN / testpass" +echo "" +echo "Run tests from project root:" +echo " cd ../../.." +echo " TEST_ZIMBRA=true pytest tests/test_caldav.py -k Zimbra -v" +echo "" +echo "To stop Zimbra: ./stop.sh" +echo "To view logs: docker-compose logs -f zimbra" diff --git a/tests/docker-test-servers/zimbra/stop.sh b/tests/docker-test-servers/zimbra/stop.sh new file mode 100755 index 00000000..8763298b --- /dev/null +++ b/tests/docker-test-servers/zimbra/stop.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Stop script for Zimbra CalDAV test server + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +echo "Stopping Zimbra and removing volumes..." 
+docker-compose down -v + +echo "Zimbra stopped and volumes removed" diff --git a/tests/fixture_helpers.py b/tests/fixture_helpers.py index 4bcb54ff..1ff42787 100644 --- a/tests/fixture_helpers.py +++ b/tests/fixture_helpers.py @@ -16,23 +16,189 @@ async def _maybe_await(result: Any) -> Any: return result -async def get_or_create_test_calendar( +def _build_make_calendar_kwargs( + calendar_name: str | None, + cal_id: str | None, + supported_calendar_component_set: list[str] | None, +) -> dict[str, Any]: + """Build kwargs dict for principal.make_calendar().""" + kwargs: dict[str, Any] = {} + if calendar_name is not None: + kwargs["name"] = calendar_name + if cal_id: + kwargs["cal_id"] = cal_id + if supported_calendar_component_set: + kwargs["supported_calendar_component_set"] = supported_calendar_component_set + return kwargs + + +def _filter_calendars_by_component_set( + calendars: list[Any], + supported_calendar_component_set: list[str], + get_properties_fn: Any = None, +) -> list[Any] | None: + """Filter calendars by supported component set. + + Uses property lookup first, then URL-based heuristics as fallback. + Returns None if no matching calendars found (caller should skip test). + + Args: + calendars: List of calendar objects to filter + supported_calendar_component_set: Required component types + get_properties_fn: Callable that takes (calendar, keys) and returns + properties dict. If None, uses calendar.get_properties() directly. 
+ """ + comp_set_key = "{urn:ietf:params:xml:ns:caldav}supported-calendar-component-set" + + matching_calendars = [] + for c in calendars: + try: + if get_properties_fn: + props = get_properties_fn(c, [comp_set_key]) + else: + props = c.get_properties([comp_set_key]) + cal_components = props.get(comp_set_key, []) + if cal_components and all( + comp in cal_components for comp in supported_calendar_component_set + ): + matching_calendars.append(c) + except Exception: + pass + + # Fallback: URL/name pattern heuristics (some servers like Zimbra don't + # return the supported-calendar-component-set property) + if not matching_calendars: + for c in calendars: + url_path = str(c.url).lower() + if "VTODO" in supported_calendar_component_set: + if "/tasks/" in url_path or "_tasks/" in url_path: + matching_calendars.append(c) + elif "VJOURNAL" in supported_calendar_component_set: + if "/journal" in url_path or "_journal" in url_path: + matching_calendars.append(c) + + return matching_calendars or None + + +def _find_test_calendar( + calendars: list[Any], + get_properties_fn: Any = None, +) -> Any: + """Find a dedicated test calendar by display name, or return first calendar. + + Args: + calendars: List of calendar objects to search + get_properties_fn: Callable that takes (calendar, keys) and returns + properties dict. If None, uses calendar.get_properties() directly. 
+ """ + for c in calendars: + try: + if get_properties_fn: + props = get_properties_fn(c, []) + else: + props = c.get_properties([]) + display_name = props.get("{DAV:}displayname", "") + if "pythoncaldav-test" in str(display_name): + return c + except Exception: + pass + return calendars[0] if calendars else None + + +def get_or_create_test_calendar( client: Any, principal: Any, - calendar_name: str = "pythoncaldav-test", + calendar_name: str | None = "pythoncaldav-test", cal_id: str | None = None, + supported_calendar_component_set: list[str] | None = None, ) -> tuple[Any, bool]: """ - Get or create a test calendar, with fallback to existing calendars. + Get or create a test calendar (sync version), with fallback to existing calendars. + + Args: + client: The DAV client + principal: The principal object (or None to skip principal-based creation) + calendar_name: Name for the test calendar, or None to skip setting name + cal_id: Optional calendar ID + supported_calendar_component_set: Component types this calendar should support + + Returns: + Tuple of (calendar, was_created) where was_created indicates if + we created the calendar (and should clean it up) or are using + an existing one. + """ + from caldav.lib import error + + calendar = None + created = False + + ## First of all, check if the server test config specifies that we + ## should use a dedicated calendar. This can be specified in the features + ## as for now. 
+    test_cal_info = client.features.is_supported('test-calendar', return_type=dict) if getattr(client, "features", None) else {}
+    if 'name' in test_cal_info or 'cal_url' in test_cal_info or 'cal_id' in test_cal_info:
+        return principal.calendar(**test_cal_info), False
+
+    # Check if server supports calendar creation via features
+    supports_create = True
+    if hasattr(client, "features") and client.features:
+        supports_create = client.features.is_supported("create-calendar")
+
+    if supports_create and principal is not None:
+        try:
+            kwargs = _build_make_calendar_kwargs(
+                calendar_name, cal_id, supported_calendar_component_set
+            )
+            calendar = principal.make_calendar(**kwargs)
+            created = True
+        except (error.MkcalendarError, error.AuthorizationError, error.NotFoundError):
+            # Creation failed - try to get by cal_id if available
+            if cal_id:
+                try:
+                    calendar = principal.calendar(cal_id=cal_id)
+                except Exception:
+                    pass
 
-    This implements the same logic as the sync _fixCalendar_ method,
-    providing safeguards against accidentally overwriting user data.
+    if calendar is None:
+        # Fall back to finding an existing calendar
+        calendars = None
+
+        if principal is not None:
+            try:
+                calendars = principal.get_calendars()
+            except (error.NotFoundError, error.AuthorizationError):
+                pass
+
+        if calendars:
+            if supported_calendar_component_set:
+                filtered = _filter_calendars_by_component_set(
+                    calendars, supported_calendar_component_set
+                )
+                if filtered is None:
+                    return None, False
+                calendars = filtered
+
+            calendar = _find_test_calendar(calendars)
+
+    return calendar, created
+
+
+async def aget_or_create_test_calendar(
+    client: Any,
+    principal: Any,
+    calendar_name: str | None = "pythoncaldav-test",
+    cal_id: str | None = None,
+    supported_calendar_component_set: list[str] | None = None,
+) -> tuple[Any, bool]:
+    """
+    Get or create a test calendar (async version), with fallback to existing calendars.
Args: client: The DAV client (sync or async) principal: The principal object (or None to skip principal-based creation) - calendar_name: Name for the test calendar + calendar_name: Name for the test calendar, or None to skip setting name cal_id: Optional calendar ID + supported_calendar_component_set: Component types this calendar should support Returns: Tuple of (calendar, was_created) where was_created indicates if @@ -50,15 +216,19 @@ async def get_or_create_test_calendar( supports_create = client.features.is_supported("create-calendar") if supports_create and principal is not None: - # Try to create a new calendar try: - calendar = await _maybe_await( - principal.make_calendar(name=calendar_name, cal_id=cal_id) + kwargs = _build_make_calendar_kwargs( + calendar_name, cal_id, supported_calendar_component_set ) + calendar = await _maybe_await(principal.make_calendar(**kwargs)) created = True except (error.MkcalendarError, error.AuthorizationError, error.NotFoundError): - # Creation failed - fall back to finding existing calendar - pass + # Creation failed - try to get by cal_id if available + if cal_id: + try: + calendar = await _maybe_await(principal.calendar(cal_id=cal_id)) + except Exception: + pass if calendar is None: # Fall back to finding an existing calendar @@ -71,6 +241,40 @@ async def get_or_create_test_calendar( pass if calendars: + if supported_calendar_component_set: + + async def async_get_props(cal: Any, keys: list[str]) -> dict: + return await _maybe_await(cal.get_properties(keys)) + + # Can't use the sync helper with async get_properties, + # so inline the filtering + comp_set_key = "{urn:ietf:params:xml:ns:caldav}supported-calendar-component-set" + matching_calendars: list[Any] = [] + for c in calendars: + try: + props = await async_get_props(c, [comp_set_key]) + cal_components = props.get(comp_set_key, []) + if cal_components and all( + comp in cal_components for comp in supported_calendar_component_set + ): + matching_calendars.append(c) + 
except Exception: + pass + + if not matching_calendars: + for c in calendars: + url_path = str(c.url).lower() + if "VTODO" in supported_calendar_component_set: + if "/tasks/" in url_path or "_tasks/" in url_path: + matching_calendars.append(c) + elif "VJOURNAL" in supported_calendar_component_set: + if "/journal" in url_path or "_journal" in url_path: + matching_calendars.append(c) + + if not matching_calendars: + return None, False + calendars = matching_calendars + # Look for a dedicated test calendar first for c in calendars: try: @@ -83,7 +287,7 @@ async def get_or_create_test_calendar( pass # Fall back to first calendar - if calendar is None: + if calendar is None and calendars: calendar = calendars[0] return calendar, created diff --git a/tests/test_async_integration.py b/tests/test_async_integration.py index ffb31e3c..1f733b2e 100644 --- a/tests/test_async_integration.py +++ b/tests/test_async_integration.py @@ -8,13 +8,15 @@ """ import asyncio -from datetime import datetime +from datetime import datetime, timedelta, timezone from functools import wraps from typing import Any import pytest import pytest_asyncio +from caldav.compatibility_hints import FeatureSet + from .test_servers import TestServer, get_available_servers @@ -34,54 +36,79 @@ async def wrapper(*args, **kwargs): return wrapper -# Test data -ev1 = """BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Example Corp.//CalDAV Client//EN -BEGIN:VEVENT -UID:async-test-event-001@example.com -DTSTAMP:20060712T182145Z -DTSTART:20060714T170000Z -DTEND:20060715T040000Z -SUMMARY:Async Test Event -END:VEVENT -END:VCALENDAR""" +# Dynamic test data generators - use near-future dates to avoid +# min-date-time restrictions on servers like CCS. 
+_base_date = None + + +def _get_base_date() -> datetime: + """Return a stable base date for the current test session (tomorrow at noon UTC).""" + global _base_date + if _base_date is None: + tomorrow = datetime.now(tz=timezone.utc).date() + timedelta(days=1) + _base_date = datetime(tomorrow.year, tomorrow.month, tomorrow.day, 12, 0, 0) + return _base_date -ev2 = """BEGIN:VCALENDAR + +def _fmt(dt: datetime) -> str: + return dt.strftime("%Y%m%dT%H%M%SZ") + + +def make_event(uid: str, summary: str, dtstart: datetime, dtend: datetime) -> str: + return f"""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//Example Corp.//CalDAV Client//EN BEGIN:VEVENT -UID:async-test-event-002@example.com -DTSTAMP:20060712T182145Z -DTSTART:20060715T170000Z -DTEND:20060716T040000Z -SUMMARY:Second Async Test Event +UID:{uid} +DTSTAMP:{_fmt(datetime.now(tz=timezone.utc))} +DTSTART:{_fmt(dtstart)} +DTEND:{_fmt(dtend)} +SUMMARY:{summary} END:VEVENT END:VCALENDAR""" -todo1 = """BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Example Corp.//CalDAV Client//EN -BEGIN:VTODO -UID:async-test-todo-001@example.com -DTSTAMP:20060712T182145Z -SUMMARY:Async Test Todo -STATUS:NEEDS-ACTION -END:VTODO -END:VCALENDAR""" -todo2 = """BEGIN:VCALENDAR +def make_todo(uid: str, summary: str, status: str = "NEEDS-ACTION") -> str: + return f"""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//Example Corp.//CalDAV Client//EN BEGIN:VTODO -UID:async-test-todo-002@example.com -DTSTAMP:20060712T182145Z -SUMMARY:Completed Async Todo -STATUS:COMPLETED +UID:{uid} +DTSTAMP:{_fmt(datetime.now(tz=timezone.utc))} +SUMMARY:{summary} +STATUS:{status} END:VTODO END:VCALENDAR""" +def ev1() -> str: + base = _get_base_date() + return make_event( + "async-test-event-001@example.com", + "Async Test Event", + base, + base + timedelta(hours=11), + ) + + +def ev2() -> str: + base = _get_base_date() + return make_event( + "async-test-event-002@example.com", + "Second Async Test Event", + base + timedelta(days=1), + base + timedelta(days=1, hours=11), + ) + + +def 
todo1() -> str: + return make_todo("async-test-todo-001@example.com", "Async Test Todo") + + +def todo2() -> str: + return make_todo("async-test-todo-002@example.com", "Completed Async Todo", "COMPLETED") + + async def add_event(calendar: Any, data: str) -> Any: """Helper to add an event to a calendar.""" from caldav.aio import AsyncEvent @@ -112,6 +139,29 @@ class AsyncFunctionalTestsBaseClass: # Server configuration - set by dynamic class generation server: TestServer + @property + def _features(self): + """Cached FeatureSet from server config.""" + if not hasattr(self.__class__, "_feature_set_cache"): + features = self.server.features + if isinstance(features, str): + import caldav.compatibility_hints + + name = features + if name.startswith("compatibility_hints."): + name = name[len("compatibility_hints.") :] + features = getattr(caldav.compatibility_hints, name) + self.__class__._feature_set_cache = FeatureSet(features) + return self.__class__._feature_set_cache + + def is_supported(self, feature, return_type=bool, accept_fragile=False): + return self._features.is_supported(feature, return_type, accept_fragile=accept_fragile) + + def skip_unless_support(self, feature): + if not self.is_supported(feature): + msg = self._features.find_feature(feature).get("description", feature) + pytest.skip("Test skipped due to server incompatibility issue: " + msg) + @pytest.fixture(scope="class") def test_server(self) -> TestServer: """Get the test server for this class.""" @@ -164,7 +214,7 @@ async def async_calendar(self, async_client: Any) -> Any: from caldav.aio import AsyncPrincipal from caldav.lib.error import AuthorizationError, NotFoundError - from .fixture_helpers import get_or_create_test_calendar + from .fixture_helpers import aget_or_create_test_calendar calendar_name = f"async-test-{datetime.now().strftime('%Y%m%d%H%M%S%f')}" @@ -176,7 +226,7 @@ async def async_calendar(self, async_client: Any) -> Any: pass # Use shared helper for calendar setup - calendar, 
created = await get_or_create_test_calendar( + calendar, created = await aget_or_create_test_calendar( async_client, principal, calendar_name=calendar_name ) @@ -192,6 +242,54 @@ async def async_calendar(self, async_client: Any) -> Any: except Exception: pass + @pytest_asyncio.fixture + async def async_task_list(self, async_client: Any) -> Any: + """Create a task list for todo tests. + + For servers that don't support mixed calendars (like Zimbra), todos must + be stored in a separate task list with supported_calendar_component_set=["VTODO"]. + """ + from caldav.aio import AsyncPrincipal + from caldav.lib.error import AuthorizationError, NotFoundError + + from .fixture_helpers import aget_or_create_test_calendar + + # Check if server supports mixed calendars + supports_mixed = True + if hasattr(async_client, "features") and async_client.features: + supports_mixed = async_client.features.is_supported("save-load.todo.mixed-calendar") + + calendar_name = f"async-task-list-{datetime.now().strftime('%Y%m%d%H%M%S%f')}" + + # Try to get principal for calendar operations + principal = None + try: + principal = await AsyncPrincipal.create(async_client) + except (NotFoundError, AuthorizationError): + pass + + # For servers without mixed calendar support, create a dedicated task list + component_set = ["VTODO"] if not supports_mixed else None + + calendar, created = await aget_or_create_test_calendar( + async_client, + principal, + calendar_name=calendar_name, + supported_calendar_component_set=component_set, + ) + + if calendar is None: + pytest.skip("Could not create or find a task list for testing") + + yield calendar + + # Only cleanup if we created the calendar + if created: + try: + await calendar.delete() + except Exception: + pass + # ==================== Test Methods ==================== @pytest.mark.asyncio @@ -208,33 +306,47 @@ async def test_principal_calendars(self, async_client: Any) -> None: @pytest.mark.asyncio async def test_principal_make_calendar(self, 
async_client: Any) -> None: """Test creating and deleting a calendar.""" + self.skip_unless_support("create-calendar") + from caldav.aio import AsyncCalendarSet, AsyncPrincipal from caldav.lib.error import AuthorizationError, MkcalendarError, NotFoundError - calendar_name = f"async-principal-test-{datetime.now().strftime('%Y%m%d%H%M%S%f')}" + from .fixture_helpers import cleanup_calendar_objects + + cal_id = "pythoncaldav-async-test" calendar = None + principal = None - # Try principal-based calendar creation first (works for Baikal, Xandikos) + # Try principal-based calendar creation (most servers) try: principal = await AsyncPrincipal.create(async_client) - calendar = await principal.make_calendar(name=calendar_name) - except (NotFoundError, AuthorizationError, MkcalendarError): - # Fall back to direct calendar creation (works for Radicale) + calendar = await principal.make_calendar(name="Async Test", cal_id=cal_id) + except (MkcalendarError, AuthorizationError): + # Calendar already exists from a previous run - reuse it + # (mirrors sync _fixCalendar_ pattern) + if principal is not None: + calendar = principal.calendar(cal_id=cal_id) + except NotFoundError: + # Principal discovery failed pass if calendar is None: - # Try creating calendar at client URL + # Fall back to CalendarSet at client URL (e.g. 
Radicale) + calendar_home = AsyncCalendarSet(client=async_client, url=async_client.url) try: - calendar_home = AsyncCalendarSet(client=async_client, url=async_client.url) - calendar = await calendar_home.make_calendar(name=calendar_name) - except MkcalendarError: - pytest.skip("Server does not support MKCALENDAR") + calendar = await calendar_home.make_calendar(name="Async Test", cal_id=cal_id) + except (MkcalendarError, AuthorizationError): + calendar = async_client.calendar(cal_id=cal_id) assert calendar is not None assert calendar.url is not None - # Clean up - await calendar.delete() + # Clean up based on server capabilities + if self.is_supported("delete-calendar"): + await calendar.delete() + else: + # Can't delete the calendar, just wipe its objects + await cleanup_calendar_objects(calendar) @pytest.mark.asyncio async def test_search_events(self, async_calendar: Any) -> None: @@ -242,8 +354,8 @@ async def test_search_events(self, async_calendar: Any) -> None: from caldav.aio import AsyncEvent # Add test events - await add_event(async_calendar, ev1) - await add_event(async_calendar, ev2) + await add_event(async_calendar, ev1()) + await add_event(async_calendar, ev2()) # Search for all events events = await async_calendar.search(event=True) @@ -255,29 +367,30 @@ async def test_search_events(self, async_calendar: Any) -> None: async def test_search_events_by_date_range(self, async_calendar: Any) -> None: """Test searching for events in a date range.""" # Add test event - await add_event(async_calendar, ev1) + await add_event(async_calendar, ev1()) - # Search for events in the date range + # Search for events in the date range (covers ev1's day) + base = _get_base_date() events = await async_calendar.search( event=True, - start=datetime(2006, 7, 14), - end=datetime(2006, 7, 16), + start=base - timedelta(hours=1), + end=base + timedelta(days=1), ) assert len(events) >= 1 assert "Async Test Event" in events[0].data @pytest.mark.asyncio - async def 
test_search_todos_pending(self, async_calendar: Any) -> None: + async def test_search_todos_pending(self, async_task_list: Any) -> None: """Test searching for pending todos.""" from caldav.aio import AsyncTodo # Add pending and completed todos - await add_todo(async_calendar, todo1) - await add_todo(async_calendar, todo2) + await add_todo(async_task_list, todo1()) + await add_todo(async_task_list, todo2()) # Search for pending todos only (default) - todos = await async_calendar.search(todo=True, include_completed=False) + todos = await async_task_list.search(todo=True, include_completed=False) # Should only get the pending todo assert len(todos) >= 1 @@ -285,14 +398,14 @@ async def test_search_todos_pending(self, async_calendar: Any) -> None: assert any("NEEDS-ACTION" in t.data for t in todos) @pytest.mark.asyncio - async def test_search_todos_all(self, async_calendar: Any) -> None: + async def test_search_todos_all(self, async_task_list: Any) -> None: """Test searching for all todos including completed.""" # Add pending and completed todos - await add_todo(async_calendar, todo1) - await add_todo(async_calendar, todo2) + await add_todo(async_task_list, todo1()) + await add_todo(async_task_list, todo2()) # Search for all todos - todos = await async_calendar.search(todo=True, include_completed=True) + todos = await async_task_list.search(todo=True, include_completed=True) # Should get both todos assert len(todos) >= 2 @@ -303,8 +416,8 @@ async def test_events_method(self, async_calendar: Any) -> None: from caldav.aio import AsyncEvent # Add test events - await add_event(async_calendar, ev1) - await add_event(async_calendar, ev2) + await add_event(async_calendar, ev1()) + await add_event(async_calendar, ev2()) # Get all events events = await async_calendar.get_events() @@ -313,15 +426,15 @@ async def test_events_method(self, async_calendar: Any) -> None: assert all(isinstance(e, AsyncEvent) for e in events) @pytest.mark.asyncio - async def test_todos_method(self, 
async_calendar: Any) -> None: + async def test_todos_method(self, async_task_list: Any) -> None: """Test the todos() convenience method.""" from caldav.aio import AsyncTodo # Add test todos - await add_todo(async_calendar, todo1) + await add_todo(async_task_list, todo1()) # Get all pending todos - todos = await async_calendar.get_todos() + todos = await async_task_list.get_todos() assert len(todos) >= 1 assert all(isinstance(t, AsyncTodo) for t in todos) diff --git a/tests/test_caldav.py b/tests/test_caldav.py index 57e7a2be..48a3e307 100644 --- a/tests/test_caldav.py +++ b/tests/test_caldav.py @@ -2,7 +2,7 @@ """ Tests here communicate with third party servers and/or internal ad-hoc instances of Xandikos and Radicale, dependent on the -configuration in conf_private.py. +configuration in caldav_test_servers.yaml (see caldav_test_servers.yaml.example). Tests that do not require communication with a working caldav server belong in test_caldav_unit.py """ @@ -50,6 +50,7 @@ # RFC6638 users for scheduling tests - loaded from config file _config = load_test_server_config() rfc6638_users = _config.get("rfc6638_users", []) +from caldav import Calendar, DAVObject, Event, FreeBusy, Principal, Todo from caldav.compatibility_hints import ( incompatibility_description, ) ## TEMP - should be removed in the future @@ -57,7 +58,6 @@ from caldav.elements import cdav, dav, ical from caldav.lib import error from caldav.lib.python_utilities import to_local, to_str -from caldav.objects import Calendar, DAVObject, Event, FreeBusy, Principal, Todo from caldav.search import CalDAVSearcher log = logging.getLogger("caldav") @@ -877,10 +877,17 @@ def _cleanup(self, mode=None): cal.delete() if self.check_compatibility_flag("unique_calendar_ids") and mode == "pre": a = self._teardownCalendar(name="Yep") - for calid in (self.testcal_id, self.testcal_id2): + for calid in (self.testcal_id, self.testcal_id2, self.testcal_id + "-tasks"): self._teardownCalendar(cal_id=calid) if self.cleanup_regime 
== "thorough": - for name in ("Yep", "Yapp", "Yølp", self.testcal_id, self.testcal_id2): + for name in ( + "Yep", + "Yapp", + "Yølp", + self.testcal_id, + self.testcal_id2, + self.testcal_id + "-tasks", + ): self._teardownCalendar(name=name) self._teardownCalendar(cal_id=name) @@ -916,48 +923,50 @@ def _fixCalendar_(self, **kwargs): Should ideally return a new calendar, if that's not possible it should see if there exists a test calendar, if that's not possible, give up and return the primary calendar. + + Delegates core create-or-find logic to fixture_helpers.get_or_create_test_calendar, + handling test-infrastructure concerns (caching, cleanup, cal_id defaults) here. """ + from .fixture_helpers import get_or_create_test_calendar + if not self.is_supported("create-calendar"): if not self._default_calendar: - calendars = self.principal.get_calendars() - for c in calendars: - if ( - "pythoncaldav-test" - in c.get_properties( - [ - dav.DisplayName(), - ] - ).values() - ): - self._default_calendar = c - return c - self._default_calendar = calendars[0] - + self._default_calendar, _ = get_or_create_test_calendar(self.caldav, self.principal) return self._default_calendar - else: - if "name" not in kwargs: - if not self.check_compatibility_flag( - "unique_calendar_ids" - ) and self.cleanup_regime in ("light", "pre"): - self._teardownCalendar(cal_id=self.testcal_id) - if not self.is_supported("create-calendar.set-displayname"): - kwargs["name"] = None - else: - kwargs["name"] = "Yep" - if "cal_id" not in kwargs: + + # Pre-processing: set up defaults for name and cal_id + if "name" not in kwargs: + if not self.check_compatibility_flag("unique_calendar_ids") and self.cleanup_regime in ( + "light", + "pre", + ): + self._teardownCalendar(cal_id=self.testcal_id) + if not self.is_supported("create-calendar.set-displayname"): + kwargs["name"] = None + else: + kwargs["name"] = "Yep" + if "cal_id" not in kwargs: + # Use a separate calendar for non-VEVENT component sets + # (e.g. 
VTODO-only) to avoid reusing a VEVENT-only calendar + # on servers where MKCALENDAR "already exists" falls through + # to the existing calendar with the wrong component set. + comp_set = kwargs.get("supported_calendar_component_set", []) + if comp_set and "VEVENT" not in comp_set: + kwargs["cal_id"] = self.testcal_id + "-tasks" + else: kwargs["cal_id"] = self.testcal_id - try: - ret = self.principal.make_calendar(**kwargs) - except (error.MkcalendarError, error.AuthorizationError): - ## "calendar already exists" can be ignored (at least - ## if no_delete_calendar flag is set). Cyrus wrongly - ## flags this throug an AuthorizationError. I guess - ## the logic is "you are not authorized to override - ## a unique id constraint") - ret = self.principal.calendar(cal_id=kwargs["cal_id"]) - if self.cleanup_regime == "post": - self.calendars_used.append(ret) - return ret + + ret, _ = get_or_create_test_calendar( + self.caldav, + self.principal, + calendar_name=kwargs.get("name", "pythoncaldav-test"), + cal_id=kwargs.get("cal_id"), + supported_calendar_component_set=kwargs.get("supported_calendar_component_set"), + ) + + if self.cleanup_regime == "post": + self.calendars_used.append(ret) + return ret def testCheckCompatibility(self, request) -> None: try: @@ -984,6 +993,12 @@ def testCheckCompatibility(self, request) -> None: expectation = fe.is_supported(feature, str) if "fragile" in (observation, expectation): continue + if "unknown" in (observation, expectation): + continue + ## Skip features the checker never explicitly tested - + ## the observation would just be a default, not a real result + if feature not in observed and feature not in fo._server_features: + continue type_ = fo.find_feature(feature).get("type", "server-feature") if type_ in ( "client-feature", @@ -1627,10 +1642,12 @@ def testLoadEvent(self): c2 = self._fixCalendar(name="Yapp", cal_id=self.testcal_id2) e1_ = c1.add_event(ev1) - e1_.load() + if not 
self.check_compatibility_flag("event_by_url_is_broken"): + e1_.load() e1 = c1.get_events()[0] - assert e1.url == e1_.url - e1.load() + if not self.check_compatibility_flag("event_by_url_is_broken"): + assert e1.url == e1_.url + e1.load() if ( not self.check_compatibility_flag("unique_calendar_ids") and self.cleanup_regime == "post" @@ -1672,12 +1689,13 @@ def testCopyEvent(self): ## which shares the id with the event in c1? e1_in_c2.vobject_instance.vevent.summary.value = "asdf" e1_in_c2.save() - e1.load() - ## should e1.summary be 'asdf' or 'Bastille Day Party'? I do - ## not know, but all implementations I've tested will treat - ## the copy in the other calendar as a distinct entity, even - ## if the uid is the same. - assert e1.vobject_instance.vevent.summary.value == "Bastille Day Party" + if not self.check_compatibility_flag("event_by_url_is_broken"): + e1.load() + ## should e1.summary be 'asdf' or 'Bastille Day Party'? I do + ## not know, but all implementations I've tested will treat + ## the copy in the other calendar as a distinct entity, even + ## if the uid is the same. 
+ assert e1.vobject_instance.vevent.summary.value == "Bastille Day Party" assert c2.get_events()[0].vobject_instance.vevent.uid == e1.vobject_instance.vevent.uid ## Duplicate the event in the same calendar, with same uid - @@ -1728,6 +1746,7 @@ def testGetSupportedComponents(self): def testSearchEvent(self): self.skip_unless_support("save-load.event") self.skip_unless_support("search") + self.skip_unless_support("search.time-range.event.old-dates") c = self._fixCalendar() num_existing = len(c.get_events()) @@ -2556,6 +2575,7 @@ def testTodoDatesearch(self): """ self.skip_unless_support("save-load.todo") self.skip_unless_support("search.time-range.todo") + self.skip_unless_support("search.time-range.todo.old-dates") c = self._fixCalendar(supported_calendar_component_set=["VTODO"]) # add todo-item @@ -2626,6 +2646,9 @@ def testTodoDatesearch(self): # Hence a compliant server should chuck out all the todos except t5. # Not all servers perform according to (my interpretation of) the RFC. foo = 5 + implicit_todo_fragile = ( + self.is_supported("search.recurrences.includes-implicit.todo", str) == "fragile" + ) if not self.is_supported("search.recurrences.includes-implicit.todo"): foo -= 1 ## t6 will not be returned if self.check_compatibility_flag( @@ -2636,8 +2659,12 @@ def testTodoDatesearch(self): foo -= 2 ## t1 and t4 not returned elif self.check_compatibility_flag("vtodo_datesearch_notime_task_is_skipped"): foo -= 1 ## t4 not returned - assert len(todos1) == foo - assert len(todos2) == foo + if implicit_todo_fragile: + assert len(todos1) in (foo, foo + 1) + assert len(todos2) in (foo, foo + 1) + else: + assert len(todos1) == foo + assert len(todos2) == foo ## verify that "expand" works if self.is_supported("search.recurrences.includes-implicit.todo"): @@ -2671,14 +2698,15 @@ def testTodoDatesearch(self): urls_found = [x.url for x in todos1] urls_found2 = [x.url for x in todos2] assert set(urls_found) == set(urls_found2) - if 
self.is_supported("search.recurrences.includes-implicit.todo"): - urls_found.remove(t6.url) + urls_found = set(urls_found) + if self.is_supported("search.recurrences.includes-implicit.todo", accept_fragile=True): + urls_found.discard(t6.url) if not self.check_compatibility_flag( "vtodo_datesearch_nodtstart_task_is_skipped" ) and not self.check_compatibility_flag("vtodo_datesearch_notime_task_is_skipped"): - urls_found.remove(t4.url) + urls_found.discard(t4.url) if self.check_compatibility_flag("vtodo_no_due_infinite_duration"): - urls_found.remove(t1.url) + urls_found.discard(t1.url) ## everything should be popped from urls_found by now assert len(urls_found) == 0 @@ -2779,11 +2807,7 @@ def testTodoRecurringCompleteSafe(self): def testTodoRecurringCompleteThisandfuture(self): self.skip_unless_support("save-load.todo") - ## TODO TODO TODO ... gross hack just to skip this test on bedework. - ## The test fails on bedework with a big 500 internal server error - ## it MAY be that I'm the one breaking standards - ## this ought to be researched better. - self.skip_unless_support("search.text") + self.skip_unless_support("save-load.todo.recurrences.thisandfuture") c = self._fixCalendar(supported_calendar_component_set=["VTODO"]) assert len(c.get_todos()) == 0 t6 = c.add_todo(todo6, status="NEEDS-ACTION") @@ -3088,6 +3112,7 @@ def testDateSearchAndFreeBusy(self): """ self.skip_unless_support("save-load.event") self.skip_unless_support("search") + self.skip_unless_support("search.time-range.event.old-dates") # Create calendar, add event ... 
c = self._fixCalendar() assert c.url is not None @@ -3280,6 +3305,7 @@ def testRecurringDateSearch(self): def testRecurringDateWithExceptionSearch(self): self.skip_unless_support("search") + self.skip_unless_support("search.time-range.event.old-dates") c = self._fixCalendar() # evr2 is a bi-weekly event starting 2024-04-11 @@ -3492,7 +3518,12 @@ class TestProxy(proxy.TestCase): def setup_method(self, *largs, **kwargs): self.proxy = f"http://localhost:{self.PROXY.flags.port}" - self.server_params = caldav_servers[-1] + # Need an HTTP server (not HTTPS) because the proxy injects headers + # into the response, which only works for unencrypted traffic + http_servers = [s for s in caldav_servers if s.get("url", "").startswith("http://")] + if not http_servers: + pytest.skip("No HTTP server available for proxy testing") + self.server_params = http_servers[0] def testNoProxyRaisesError(self): with client(**self.server_params) as conn: diff --git a/tests/test_caldav_unit.py b/tests/test_caldav_unit.py index d4fe90a8..2b53569c 100755 --- a/tests/test_caldav_unit.py +++ b/tests/test_caldav_unit.py @@ -468,7 +468,7 @@ def testAbsoluteURL(self): def _load(self, only_if_unloaded=True): self.data = todo6 - @mock.patch("caldav.objects.CalendarObjectResource.load", new=_load) + @mock.patch("caldav.calendarobjectresource.CalendarObjectResource.load", new=_load) def testDateSearch(self): """ ## ref https://github.com/python-caldav/caldav/issues/133 diff --git a/tests/test_compatibility_hints.py b/tests/test_compatibility_hints.py index 8b752509..aa1dbc83 100644 --- a/tests/test_compatibility_hints.py +++ b/tests/test_compatibility_hints.py @@ -7,7 +7,71 @@ server for the tests in this file. 
""" -from caldav.compatibility_hints import FeatureSet +import warnings + +import pytest + +from caldav.compatibility_hints import VALID_SUPPORT_LEVELS, FeatureSet +from caldav.config import resolve_features as _resolve_features + + +class TestConfigValidation: + """Test configuration validation in FeatureSet""" + + def test_invalid_support_level_warns(self) -> None: + """Invalid support level should emit a warning""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"save-load.event": {"support": "invalid-level"}}) + assert len(w) == 1 + assert "invalid support level" in str(w[0].message).lower() + assert "invalid-level" in str(w[0].message) + + def test_valid_support_levels_no_warning(self) -> None: + """Valid support levels should not emit warnings""" + for level in VALID_SUPPORT_LEVELS: + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"save-load.event": {"support": level}}) + # Filter to only UserWarnings about support levels + support_warnings = [x for x in w if "support level" in str(x.message).lower()] + assert len(support_warnings) == 0, f"Level '{level}' should be valid" + + def test_unknown_feature_warns(self) -> None: + """Unknown feature name should emit a warning""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"typo-feature-name": {"support": "full"}}) + assert len(w) == 1 + assert "unknown feature" in str(w[0].message).lower() + assert "typo-feature-name" in str(w[0].message) + + def test_known_feature_no_warning(self) -> None: + """Known feature names should not emit warnings""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"save-load.event": {"support": "full"}}) + # Filter to only UserWarnings about unknown features + unknown_warnings = [x for x in w if "unknown feature" in str(x.message).lower()] + assert len(unknown_warnings) == 0 + + def 
test_boolean_shortcut_no_warning(self) -> None: + """Boolean shortcuts (True/False) should not emit warnings""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"save-load.event": True}) + FeatureSet({"save-load.todo": False}) + # Filter to only UserWarnings about support levels + support_warnings = [x for x in w if "support level" in str(x.message).lower()] + assert len(support_warnings) == 0 + + def test_string_shortcut_validates(self) -> None: + """String shortcuts should also be validated""" + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + FeatureSet({"save-load.event": "bad-value"}) + assert len(w) == 1 + assert "invalid support level" in str(w[0].message).lower() class TestFeatureSetCollapse: @@ -178,8 +242,8 @@ def test_collapse_no_parent_features(self) -> None: # Should remain unchanged assert fs._server_features == {"sync-token": {"support": "full"}} - def test_collapse_single_subfeature_no_collapse(self) -> None: - """Single subfeature without parent value should NOT collapse""" + def test_collapse_single_subfeature(self) -> None: + """Single subfeature should collapse since parent derives from children""" fs = FeatureSet() # sync-token only has one subfeature: delete @@ -189,9 +253,9 @@ def test_collapse_single_subfeature_no_collapse(self) -> None: fs.collapse() - # Single subfeature should remain (no collapse unless parent has value) - assert "sync-token.delete" in fs._server_features - assert "sync-token" not in fs._server_features + # Parent status is derived from the single child, so collapse is valid + assert "sync-token" in fs._server_features + assert "sync-token.delete" not in fs._server_features def test_collapse_with_complex_dict_values(self) -> None: """Collapse should handle complex dictionary values""" @@ -287,3 +351,212 @@ def test_hierarchical_vs_independent_subfeatures(self) -> None: f"create-calendar should default to 'full' ignoring independent " 
f"subfeature .auto, but got {result2}" ) + + def test_intermediate_feature_derives_from_children(self) -> None: + """Test that intermediate features (e.g. search.text) derive status from their children""" + # search.text has 5 children: case-sensitive, case-insensitive, + # substring, category, by-uid (none have explicit defaults) + + # All children set with mixed statuses -> derive "unknown" + fs = FeatureSet( + { + "search.text.case-sensitive": {"support": "unsupported"}, + "search.text.case-insensitive": {"support": "unsupported"}, + "search.text.substring": {"support": "unsupported"}, + "search.text.category": {"support": "unsupported"}, + "search.text.by-uid": {"support": "fragile"}, + } + ) + assert not fs.is_supported("search.text") + assert fs.is_supported("search.text", return_type=dict) == {"support": "unknown"} + + # Partial children set with mixed non-positive statuses -> inconclusive, + # falls back to default ("full") + fs1b = FeatureSet( + { + "search.text.case-sensitive": {"support": "unsupported"}, + "search.text.by-uid": {"support": "fragile"}, + } + ) + assert fs1b.is_supported("search.text") + + # All children unsupported -> parent derives as "unsupported" + fs2 = FeatureSet( + { + "search.text.case-sensitive": {"support": "unsupported"}, + "search.text.case-insensitive": {"support": "unsupported"}, + "search.text.substring": {"support": "unsupported"}, + "search.text.category": {"support": "unsupported"}, + "search.text.by-uid": {"support": "unsupported"}, + } + ) + assert not fs2.is_supported("search.text") + assert fs2.is_supported("search.text", return_type=dict) == {"support": "unsupported"} + + # No children set -> falls back to default ("full") + fs3 = FeatureSet({}) + assert fs3.is_supported("search.text") + + # Explicit parent value takes precedence over children + fs4 = FeatureSet( + { + "search.text": {"support": "full"}, + "search.text.case-sensitive": {"support": "unsupported"}, + } + ) + assert fs4.is_supported("search.text") + + 
+class TestDeriveFromSubfeatures: + """Test _derive_from_subfeatures with partial and complete subfeature configs. + + Uses search.recurrences which has two relevant children without defaults: + - search.recurrences.expanded + - search.recurrences.includes-implicit + + The default for search.recurrences (a server-feature) is {"support": "full"}. + """ + + @pytest.mark.parametrize( + "scenario, config, query, expected_support", + [ + ( + "all_children_unsupported", + { + "search.recurrences.expanded": {"support": "unsupported"}, + "search.recurrences.includes-implicit": {"support": "unsupported"}, + }, + "search.recurrences", + "unsupported", + ), + ( + "all_children_supported", + { + "search.recurrences.expanded": {"support": "full"}, + "search.recurrences.includes-implicit": {"support": "full"}, + }, + "search.recurrences", + "full", + ), + ( + "mixed_children", + { + "search.recurrences.expanded": {"support": "unsupported"}, + "search.recurrences.includes-implicit": {"support": "full"}, + }, + "search.recurrences", + "unknown", + ), + ( + "partial_one_child_unsupported_falls_to_default", + { + "search.recurrences.expanded": {"support": "unsupported"}, + }, + "search.recurrences", + "full", # default - partial negative info is inconclusive + ), + ( + "partial_one_child_supported", + { + "search.recurrences.includes-implicit": {"support": "full"}, + }, + "search.recurrences", + "full", # any positive support → derive as supported + ), + ( + "gmx_partial_unsupported_query_unset_sibling_child", + { + "search.recurrences.expanded": {"support": "unsupported"}, + }, + "search.recurrences.includes-implicit.todo", + "full", # should NOT inherit unsupported from sibling + ), + ( + "parent_explicit_overrides_children", + { + "search.recurrences": {"support": "fragile"}, + }, + "search.recurrences.includes-implicit.todo", + "fragile", + ), + ( + "child_explicit_overrides_parent", + { + "search.recurrences": {"support": "unsupported"}, + 
"search.recurrences.includes-implicit.todo": {"support": "full"}, + }, + "search.recurrences.includes-implicit.todo", + "full", + ), + ], + ids=lambda x: x if isinstance(x, str) and "_" in x else "", + ) + def test_derivation_matrix( + self, + scenario: str, + config: dict, + query: str, + expected_support: str, + ) -> None: + fs = FeatureSet(config) + result = fs.is_supported(query, return_type=str) + assert result == expected_support, ( + f"Scenario '{scenario}': querying '{query}' with config {config} " + f"expected '{expected_support}', got '{result}'" + ) + + +class TestResolveFeatures: + """Test _resolve_features base+override resolution.""" + + def test_none_returns_none(self) -> None: + assert _resolve_features(None) is None + + def test_string_resolves_profile(self) -> None: + import caldav.compatibility_hints as ch + + result = _resolve_features("synology") + assert result is ch.synology + + def test_string_with_prefix(self) -> None: + import caldav.compatibility_hints as ch + + result = _resolve_features("compatibility_hints.synology") + assert result is ch.synology + + def test_dict_without_base_passes_through(self) -> None: + features = {"search.text": {"support": "unsupported"}} + result = _resolve_features(features) + assert result is features + + def test_base_with_overrides(self) -> None: + import caldav.compatibility_hints as ch + + original_sync_token = ch.synology.get("sync-token") + features = { + "base": "synology", + "sync-token": "full", + "search.text.substring": {"support": "unsupported"}, + } + result = _resolve_features(features) + # Should have the overrides + assert result["sync-token"] == "full" + assert result["search.text.substring"] == {"support": "unsupported"} + # Should still have base features + assert result["search.text.case-sensitive"] == {"support": "unsupported"} + # Should not have modified the original synology dict + assert ch.synology.get("sync-token") == original_sync_token + assert "search.text.substring" not in 
ch.synology + # Should not contain the "base" key + assert "base" not in result + + def test_base_with_prefix(self) -> None: + result = _resolve_features( + { + "base": "compatibility_hints.synology", + "sync-token": "full", + } + ) + assert result["sync-token"] == "full" + # Original base feature should be overridden + assert result["sync-token"] != "fragile" diff --git a/tests/test_docs.py b/tests/test_docs.py index e69ab585..640160a3 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -2,12 +2,15 @@ import manuel.codeblock import manuel.doctest +import manuel.ignore import manuel.testing import pytest from .test_servers import client_context, has_test_servers -m = manuel.codeblock.Manuel() +# manuel.ignore must be the base to process ignore directives first +m = manuel.ignore.Manuel() +m += manuel.codeblock.Manuel() m += manuel.doctest.Manuel() manueltest = manuel.testing.TestFactory(m) diff --git a/tests/test_lazy_import.py b/tests/test_lazy_import.py new file mode 100644 index 00000000..9cccecb7 --- /dev/null +++ b/tests/test_lazy_import.py @@ -0,0 +1,141 @@ +""" +Tests that ``import caldav`` is lazy and does not pull in heavy dependencies +(niquests, icalendar, lxml) until they are actually needed. + +Each test spawns a subprocess so the import state is pristine. 
+""" + +import subprocess +import sys +import textwrap + +import pytest + +PYTHON = sys.executable + + +def _run(code: str) -> subprocess.CompletedProcess: + """Run *code* in a fresh Python subprocess.""" + return subprocess.run( + [PYTHON, "-c", textwrap.dedent(code)], + capture_output=True, + text=True, + timeout=30, + ) + + +class TestLazyImport: + def test_import_does_not_load_niquests(self): + r = _run("""\ + import caldav, sys + for mod in ("niquests", "requests"): + assert mod not in sys.modules, f"{mod} loaded eagerly" + """) + assert r.returncode == 0, r.stderr + + def test_import_does_not_load_icalendar(self): + r = _run("""\ + import caldav, sys + assert "icalendar" not in sys.modules, "icalendar loaded eagerly" + """) + assert r.returncode == 0, r.stderr + + def test_import_does_not_load_lxml(self): + r = _run("""\ + import caldav, sys + assert "lxml" not in sys.modules, "lxml loaded eagerly" + """) + assert r.returncode == 0, r.stderr + + def test_version_available_without_heavy_imports(self): + r = _run("""\ + import caldav, sys + v = caldav.__version__ + assert isinstance(v, str) + for mod in ("niquests", "requests", "icalendar", "lxml"): + assert mod not in sys.modules, f"{mod} loaded by __version__" + """) + assert r.returncode == 0, r.stderr + + def test_davclient_importable(self): + r = _run("""\ + from caldav import DAVClient + assert callable(DAVClient) + """) + assert r.returncode == 0, r.stderr + + def test_calendar_importable(self): + r = _run("""\ + from caldav import Calendar + assert callable(Calendar) + """) + assert r.returncode == 0, r.stderr + + def test_event_importable(self): + r = _run("""\ + from caldav import Event, Todo, Journal, FreeBusy + """) + assert r.returncode == 0, r.stderr + + def test_principal_importable(self): + r = _run("""\ + from caldav import Principal + assert callable(Principal) + """) + assert r.returncode == 0, r.stderr + + def test_searcher_importable(self): + r = _run("""\ + from caldav import 
CalDAVSearcher + assert callable(CalDAVSearcher) + """) + assert r.returncode == 0, r.stderr + + def test_error_submodule(self): + r = _run("""\ + import caldav + err = caldav.error + assert hasattr(err, "NotFoundError") + """) + assert r.returncode == 0, r.stderr + + def test_dir_includes_lazy_names(self): + r = _run("""\ + import caldav + names = dir(caldav) + for expected in ("DAVClient", "Calendar", "Event", "Principal", + "CalDAVSearcher", "error", "__version__"): + assert expected in names, f"{expected!r} missing from dir(caldav)" + """) + assert r.returncode == 0, r.stderr + + def test_unknown_attribute_raises(self): + r = _run("""\ + import caldav + try: + caldav.NoSuchThing + raise SystemExit("should have raised AttributeError") + except AttributeError: + pass + """) + assert r.returncode == 0, r.stderr + + def test_get_functions_importable(self): + r = _run("""\ + from caldav import get_calendar, get_calendars, get_davclient + assert callable(get_calendar) + assert callable(get_calendars) + assert callable(get_davclient) + """) + assert r.returncode == 0, r.stderr + + def test_collection_types_importable(self): + r = _run("""\ + from caldav import ( + CalendarCollection, CalendarResult, CalendarSet, + DAVObject, CalendarObjectResource, + ScheduleMailbox, ScheduleInbox, ScheduleOutbox, + SynchronizableCalendarObjectCollection, + ) + """) + assert r.returncode == 0, r.stderr diff --git a/tests/test_search.py b/tests/test_search.py index 9c6ee972..71fec166 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -16,6 +16,7 @@ from caldav import Event, Journal, Todo from caldav.davclient import DAVClient +from caldav.lib.url import URL from caldav.search import CalDAVSearcher # Example icalendar data for testing @@ -143,7 +144,10 @@ @pytest.fixture def mock_client() -> DAVClient: """Create a mocked DAV client for testing.""" - return mock.Mock(spec=DAVClient) + client = mock.Mock(spec=DAVClient) + # mock_client.url needs to be a real URL object for 
URL.join() to work correctly + client.url = URL("https://calendar.example.com/calendars/user/") + return client @pytest.fixture diff --git a/tests/test_servers/__init__.py b/tests/test_servers/__init__.py index a7c13372..a81b8398 100644 --- a/tests/test_servers/__init__.py +++ b/tests/test_servers/__init__.py @@ -32,7 +32,7 @@ ExternalTestServer, TestServer, ) -from .config_loader import create_example_config, load_test_server_config +from .config_loader import ConfigParseError, load_test_server_config from .helpers import client_context, has_test_servers from .registry import ServerRegistry, get_available_servers, get_registry @@ -51,7 +51,7 @@ "get_registry", # Config loading "load_test_server_config", - "create_example_config", + "ConfigParseError", # Constants "DEFAULT_HTTP_TIMEOUT", "MAX_STARTUP_WAIT_SECONDS", diff --git a/tests/test_servers/base.py b/tests/test_servers/base.py index b4981079..f1805055 100644 --- a/tests/test_servers/base.py +++ b/tests/test_servers/base.py @@ -61,12 +61,22 @@ def url(self) -> str: @property def username(self) -> str | None: """Return the username for authentication.""" - return self.config.get("username") + # Support both test config format and main caldav config format + return ( + self.config.get("username") + or self.config.get("caldav_username") + or self.config.get("caldav_user") + ) @property def password(self) -> str | None: """Return the password for authentication.""" - return self.config.get("password") + # Support both test config format and main caldav config format + # Check explicitly for keys since empty string is valid + for key in ("password", "caldav_password", "caldav_pass"): + if key in self.config: + return self.config[key] + return None @property def features(self) -> Any: @@ -74,9 +84,12 @@ def features(self) -> Any: Return compatibility features for this server. This can be a dict of feature flags or a reference to a - compatibility hints object. + compatibility hints object. 
The "base" key (if present) is + resolved here via config.resolve_features(). """ - return self.config.get("features", []) + from caldav.config import resolve_features + + return resolve_features(self.config.get("features", [])) @abstractmethod def start(self) -> None: @@ -120,11 +133,15 @@ def get_sync_client(self) -> "DAVClient": """ from caldav.davclient import DAVClient - client = DAVClient( - url=self.url, - username=self.username, - password=self.password, - ) + kwargs: dict[str, Any] = { + "url": self.url, + "username": self.username, + "password": self.password, + "features": self.features, + } + if "ssl_verify_cert" in self.config: + kwargs["ssl_verify_cert"] = self.config["ssl_verify_cert"] + client = DAVClient(**kwargs) client.server_name = self.name # Attach no-op setup/teardown by default client.setup = lambda self_: None @@ -140,13 +157,16 @@ async def get_async_client(self) -> "AsyncDAVClient": """ from caldav.aio import get_async_davclient - return await get_async_davclient( - url=self.url, - username=self.username, - password=self.password, - features=self.features, - probe=False, # We already checked accessibility - ) + kwargs: dict[str, Any] = { + "url": self.url, + "username": self.username, + "password": self.password, + "features": self.features, + "probe": False, # We already checked accessibility + } + if "ssl_verify_cert" in self.config: + kwargs["ssl_verify_cert"] = self.config["ssl_verify_cert"] + return await get_async_davclient(**kwargs) def get_server_params(self) -> dict[str, Any]: """ @@ -165,6 +185,9 @@ def get_server_params(self) -> dict[str, Any]: "password": self.password, "features": self.features, } + # Pass through SSL verification setting if configured + if "ssl_verify_cert" in self.config: + params["ssl_verify_cert"] = self.config["ssl_verify_cert"] # Check if server is already running (either started by us or externally) already_running = self._started or self.is_accessible() if already_running: @@ -377,22 +400,34 @@ class 
ExternalTestServer(TestServer): External servers are already running somewhere - we don't start or stop them. This is used for testing against real CalDAV servers configured by the user. + + The URL can be provided directly via the 'url' config key, or constructed + from the 'auto-connect.url' feature (with domain, scheme, basepath keys). """ server_type = "external" def __init__(self, config: dict[str, Any] | None = None) -> None: super().__init__(config) - self._url = self.config.get("url", "") + self._url = self._construct_url() + + def _construct_url(self) -> str: + """Get explicit URL from config, if any. + + URL resolution from features (auto-connect.url) is handled by + the DAVClient constructor via _auto_url() - not duplicated here. + """ + return self.config.get("url") or self.config.get("caldav_url") or "" @property def url(self) -> str: return self._url def start(self) -> None: - """External servers are already running - nothing to do.""" - if not self.is_accessible(): - raise RuntimeError(f"External server {self.name} at {self.url} is not accessible") + """External servers are already running - just mark as started.""" + # No accessibility check here - the caldav library handles URL + # normalization, RFC6764 discovery, and will provide proper error + # messages if the server is unreachable self._started = True def stop(self) -> None: @@ -401,9 +436,20 @@ def stop(self) -> None: self._started_by_us = False def is_accessible(self) -> bool: - """Check if the external server is accessible.""" - try: - response = requests.get(self.url, timeout=DEFAULT_HTTP_TIMEOUT) - return response.status_code in (200, 401, 403, 404) - except Exception: - return False + """ + External servers are assumed accessible. + + The caldav library will handle connection errors with proper messages + if the server is actually unreachable. 
+ """ + return True + + +# Deferred registration to avoid circular imports +def _register_external_server() -> None: + from .registry import register_server_class + + register_server_class("external", ExternalTestServer) + + +_register_external_server() diff --git a/tests/test_servers/config_loader.py b/tests/test_servers/config_loader.py index 7e819f2e..982e8158 100644 --- a/tests/test_servers/config_loader.py +++ b/tests/test_servers/config_loader.py @@ -1,33 +1,41 @@ """ Configuration loader for test servers. -This module provides functions for loading test server configuration -from YAML/JSON files, with fallback to the legacy conf_private.py. +This module loads test server configuration from: +1. Test-specific config files (tests/caldav_test_servers.yaml) +2. Main caldav config files with 'testing_allowed: true' sections """ -import warnings from pathlib import Path from typing import Any -from caldav.config import expand_env_vars, read_config +from caldav.config import expand_env_vars, get_all_test_servers, read_config -# Default config file locations (in priority order) -DEFAULT_CONFIG_LOCATIONS = [ - "tests/test_servers.yaml", - "tests/test_servers.json", +# Test-specific config file locations (don't need testing_allowed) +TEST_CONFIG_LOCATIONS = [ + "tests/caldav_test_servers.yaml", + "tests/caldav_test_servers.json", "~/.config/caldav/test_servers.yaml", "~/.config/caldav/test_servers.json", ] +class ConfigParseError(Exception): + """Raised when a config file exists but cannot be parsed.""" + + pass + + def load_test_server_config( config_file: str | None = None, ) -> dict[str, dict[str, Any]]: """ - Load test server configuration from file. + Load test server configuration. - Searches for config files in default locations and loads the first - one found. Falls back to conf_private.py with a deprecation warning. + Priority: + 1. Explicit config_file argument + 2. Test-specific config files (tests/caldav_test_servers.yaml, etc.) + 3. 
Main caldav config sections with 'testing_allowed: true' Args: config_file: Optional explicit path to config file @@ -36,214 +44,54 @@ def load_test_server_config( Dict mapping server names to their configuration dicts. Empty dict if no configuration found. - Example config file (YAML): - test-servers: - radicale: - type: embedded - enabled: true - port: 5232 - baikal: - type: docker - enabled: ${TEST_BAIKAL:-auto} - url: http://localhost:8800/dav.php + Raises: + ConfigParseError: If a config file exists but cannot be parsed. """ # Try explicit config file first if config_file: - cfg = read_config(config_file) - if cfg: - servers = cfg.get("test-servers", cfg) - return expand_env_vars(servers) - - # Try default locations - for loc in DEFAULT_CONFIG_LOCATIONS: + try: + return _load_config_file(config_file) + except ConfigParseError: + if Path(config_file).exists(): + raise + # File doesn't exist - fall through to other locations + + # Try test-specific config files + for loc in TEST_CONFIG_LOCATIONS: path = Path(loc).expanduser() if path.exists(): - cfg = read_config(str(path)) - if cfg: - servers = cfg.get("test-servers", cfg) - return expand_env_vars(servers) + return _load_config_file(str(path)) - # Fallback to conf_private.py with deprecation warning - return _load_from_conf_private() + # Try main caldav config (sections with testing_allowed) + servers = get_all_test_servers() + if servers: + # Add type: external for registry to use ExternalTestServer + for config in servers.values(): + config.setdefault("type", "external") + config.setdefault("enabled", True) + return servers + return {} -def _load_from_conf_private() -> dict[str, dict[str, Any]]: - """ - Load configuration from legacy conf_private.py. - - This provides backwards compatibility during migration to - the new YAML/JSON config format. - - Returns: - Dict mapping server names to their configuration dicts. - Empty dict if conf_private.py not found. 
- """ - import sys - original_path = sys.path.copy() +def _load_config_file(path: str) -> dict[str, dict[str, Any]]: + """Load and parse a config file.""" try: - sys.path.insert(0, "tests") - sys.path.insert(1, ".") + cfg = read_config(path) + except Exception as e: + raise ConfigParseError(f"Config file '{path}' exists but could not be parsed: {e}") from e - try: - import conf_private - - warnings.warn( - "conf_private.py is deprecated for test server configuration. " - "Please migrate to tests/test_servers.yaml. " - "See docs/testing.rst for the new format.", - DeprecationWarning, - stacklevel=3, - ) - return _convert_conf_private_to_config(conf_private) - except ImportError: - return {} - finally: - sys.path = original_path - - -def _convert_conf_private_to_config(conf_private: Any) -> dict[str, dict[str, Any]]: - """ - Convert conf_private.py format to new config format. + if not cfg: + raise ConfigParseError( + f"Config file '{path}' exists but could not be parsed. Check the YAML/JSON syntax." 
+ ) - Args: - conf_private: The imported conf_private module + cfg = expand_env_vars(cfg) - Returns: - Dict mapping server names to their configuration dicts - """ - result: dict[str, dict[str, Any]] = {} - - # Convert caldav_servers list - if hasattr(conf_private, "caldav_servers"): - for i, server in enumerate(conf_private.caldav_servers): - name = server.get("name", f"server_{i}") - config: dict[str, Any] = { - "type": "external", - "enabled": server.get("enable", True), - } - # Copy all other keys - for key, value in server.items(): - if key not in ("enable", "name"): - config[key] = value - result[name.lower().replace(" ", "_")] = config - - # Handle boolean enable/disable switches - for attr in ( - "test_radicale", - "test_xandikos", - "test_baikal", - "test_nextcloud", - "test_cyrus", - "test_sogo", - "test_bedework", - ): - if hasattr(conf_private, attr): - server_name = attr.replace("test_", "") - if server_name not in result: - result[server_name] = {"type": server_name} - result[server_name]["enabled"] = getattr(conf_private, attr) - - # Handle host/port overrides - for server_name in ( - "radicale", - "xandikos", - "baikal", - "nextcloud", - "cyrus", - "sogo", - "bedework", - ): - host_attr = f"{server_name}_host" - port_attr = f"{server_name}_port" - - if hasattr(conf_private, host_attr): - if server_name not in result: - result[server_name] = {"type": server_name} - result[server_name]["host"] = getattr(conf_private, host_attr) - - if hasattr(conf_private, port_attr): - if server_name not in result: - result[server_name] = {"type": server_name} - result[server_name]["port"] = getattr(conf_private, port_attr) - - return result - - -def create_example_config() -> str: - """ - Generate an example config file content. + # Unwrap the "test-servers" key if present (the example YAML + # uses this as a top-level namespace). Also support configs + # where server dicts are at the top level directly. 
+ if "test-servers" in cfg: + cfg = cfg["test-servers"] - Returns: - YAML-formatted example configuration - """ - return """# Test server configuration for caldav tests -# This file replaces the legacy conf_private.py - -test-servers: - # Embedded servers (run in-process) - radicale: - type: embedded - enabled: true - host: ${RADICALE_HOST:-localhost} - port: ${RADICALE_PORT:-5232} - username: user1 - password: "" - - xandikos: - type: embedded - enabled: true - host: ${XANDIKOS_HOST:-localhost} - port: ${XANDIKOS_PORT:-8993} - username: sometestuser - - # Docker servers (require docker-compose) - baikal: - type: docker - enabled: ${TEST_BAIKAL:-auto} # "auto" means check if docker available - host: ${BAIKAL_HOST:-localhost} - port: ${BAIKAL_PORT:-8800} - username: ${BAIKAL_USERNAME:-testuser} - password: ${BAIKAL_PASSWORD:-testpass} - - nextcloud: - type: docker - enabled: ${TEST_NEXTCLOUD:-auto} - host: ${NEXTCLOUD_HOST:-localhost} - port: ${NEXTCLOUD_PORT:-8801} - username: ${NEXTCLOUD_USERNAME:-testuser} - password: ${NEXTCLOUD_PASSWORD:-testpass} - - cyrus: - type: docker - enabled: ${TEST_CYRUS:-auto} - host: ${CYRUS_HOST:-localhost} - port: ${CYRUS_PORT:-8802} - username: ${CYRUS_USERNAME:-testuser@test.local} - password: ${CYRUS_PASSWORD:-testpassword} - - sogo: - type: docker - enabled: ${TEST_SOGO:-auto} - host: ${SOGO_HOST:-localhost} - port: ${SOGO_PORT:-8803} - username: ${SOGO_USERNAME:-testuser} - password: ${SOGO_PASSWORD:-testpassword} - - bedework: - type: docker - enabled: ${TEST_BEDEWORK:-auto} - host: ${BEDEWORK_HOST:-localhost} - port: ${BEDEWORK_PORT:-8804} - username: ${BEDEWORK_USERNAME:-admin} - password: ${BEDEWORK_PASSWORD:-bedework} - - # External/private servers (user-configured) - # Uncomment and configure for your own server: - # my-server: - # type: external - # enabled: true - # url: ${CALDAV_URL} - # username: ${CALDAV_USERNAME} - # password: ${CALDAV_PASSWORD} -""" + return cfg diff --git a/tests/test_servers/docker.py 
b/tests/test_servers/docker.py index 364380a1..645dfd0c 100644 --- a/tests/test_servers/docker.py +++ b/tests/test_servers/docker.py @@ -2,7 +2,7 @@ Docker-based test server implementations. This module provides test server implementations for servers that run -in Docker containers: Baikal, Nextcloud, Cyrus, SOGo, and Bedework. +in Docker containers: Baikal, Nextcloud, Cyrus, SOGo, Bedework, DAViCal, Davis, CCS, and Zimbra. """ import os @@ -97,7 +97,9 @@ def __init__(self, config: dict[str, Any] | None = None) -> None: config.setdefault("host", os.environ.get("CYRUS_HOST", "localhost")) config.setdefault("port", int(os.environ.get("CYRUS_PORT", "8802"))) config.setdefault("username", os.environ.get("CYRUS_USERNAME", "user1")) - config.setdefault("password", os.environ.get("CYRUS_PASSWORD", "x")) + config.setdefault( + "password", os.environ.get("CYRUS_PASSWORD", "any-password-seems-to-work") + ) # Set up Cyrus-specific compatibility hints if "features" not in config: config["features"] = compatibility_hints.cyrus.copy() @@ -217,8 +219,10 @@ def __init__(self, config: dict[str, Any] | None = None) -> None: config = config or {} config.setdefault("host", os.environ.get("DAVICAL_HOST", "localhost")) config.setdefault("port", int(os.environ.get("DAVICAL_PORT", "8805"))) - config.setdefault("username", os.environ.get("DAVICAL_USERNAME", "admin")) + config.setdefault("username", os.environ.get("DAVICAL_USERNAME", "testuser")) config.setdefault("password", os.environ.get("DAVICAL_PASSWORD", "testpass")) + if "features" not in config: + config["features"] = compatibility_hints.davical.copy() super().__init__(config) def _default_port(self) -> int: @@ -226,14 +230,14 @@ def _default_port(self) -> int: @property def url(self) -> str: - return f"http://{self.host}:{self.port}/davical/caldav.php/{self.username}/" + return f"http://{self.host}:{self.port}/caldav.php/{self.username}/" def is_accessible(self) -> bool: """Check if DAViCal is accessible.""" try: response = 
requests.request( "PROPFIND", - f"http://{self.host}:{self.port}/davical/caldav.php/", + f"http://{self.host}:{self.port}/caldav.php/", timeout=DEFAULT_HTTP_TIMEOUT, ) return response.status_code in (200, 207, 401, 403, 404) @@ -241,6 +245,110 @@ def is_accessible(self) -> bool: return False +class DavisTestServer(DockerTestServer): + """ + Davis CalDAV server in Docker. + + Davis is a modern admin interface for sabre/dav, using Symfony 7. + The standalone image bundles PHP-FPM + Caddy with SQLite. + """ + + name = "Davis" + + def __init__(self, config: dict[str, Any] | None = None) -> None: + config = config or {} + config.setdefault("host", os.environ.get("DAVIS_HOST", "localhost")) + config.setdefault("port", int(os.environ.get("DAVIS_PORT", "8806"))) + config.setdefault("username", os.environ.get("DAVIS_USERNAME", "testuser")) + config.setdefault("password", os.environ.get("DAVIS_PASSWORD", "testpass")) + if "features" not in config: + config["features"] = compatibility_hints.davis.copy() + super().__init__(config) + + def _default_port(self) -> int: + return 8806 + + @property + def url(self) -> str: + return f"http://{self.host}:{self.port}/dav/" + + +class CCSTestServer(DockerTestServer): + """ + Apple CalendarServer (CCS) in Docker. + + CCS is Apple's open-source CalDAV/CardDAV server (archived 2019). + Uses UID-based principal URLs and XML-based directory service. 
+ """ + + name = "CCS" + + def __init__(self, config: dict[str, Any] | None = None) -> None: + config = config or {} + config.setdefault("host", os.environ.get("CCS_HOST", "localhost")) + config.setdefault("port", int(os.environ.get("CCS_PORT", "8807"))) + config.setdefault("username", os.environ.get("CCS_USERNAME", "user01")) + config.setdefault("password", os.environ.get("CCS_PASSWORD", "user01")) + if "features" not in config: + config["features"] = compatibility_hints.ccs.copy() + super().__init__(config) + + def _default_port(self) -> int: + return 8807 + + @property + def url(self) -> str: + return f"http://{self.host}:{self.port}/principals/" + + +class ZimbraTestServer(DockerTestServer): + """ + Zimbra Collaboration Suite CalDAV server in Docker. + + Zimbra is a heavyweight server (~6GB RAM, ~10 min first startup). + Uses HTTPS with a self-signed certificate. + """ + + name = "Zimbra" + + def __init__(self, config: dict[str, Any] | None = None) -> None: + config = config or {} + config.setdefault("host", os.environ.get("ZIMBRA_HOST", "zimbra-docker.zimbra.io")) + config.setdefault("port", int(os.environ.get("ZIMBRA_PORT", "8808"))) + config.setdefault( + "username", + os.environ.get("ZIMBRA_USERNAME", "testuser@zimbra.io"), + ) + config.setdefault("password", os.environ.get("ZIMBRA_PASSWORD", "testpass")) + config.setdefault("ssl_verify_cert", False) + if "features" not in config: + config["features"] = compatibility_hints.zimbra.copy() + super().__init__(config) + + def _default_port(self) -> int: + return 8808 + + @property + def url(self) -> str: + return f"https://{self.host}:{self.port}/dav/" + + def is_accessible(self) -> bool: + """Check if Zimbra is accessible (HTTPS with self-signed cert).""" + import warnings + + try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=Warning) + response = requests.get( + f"https://{self.host}:{self.port}/", + timeout=DEFAULT_HTTP_TIMEOUT, + verify=False, + ) + return response.status_code 
in (200, 301, 302, 401, 403, 404) + except Exception: + return False + + # Register server classes register_server_class("baikal", BaikalTestServer) register_server_class("nextcloud", NextcloudTestServer) @@ -248,3 +356,6 @@ def is_accessible(self) -> bool: register_server_class("sogo", SOGoTestServer) register_server_class("bedework", BedeworkTestServer) register_server_class("davical", DavicalTestServer) +register_server_class("davis", DavisTestServer) +register_server_class("ccs", CCSTestServer) +register_server_class("zimbra", ZimbraTestServer) diff --git a/tests/test_servers/registry.py b/tests/test_servers/registry.py index aba83388..cd5a4064 100644 --- a/tests/test_servers/registry.py +++ b/tests/test_servers/registry.py @@ -118,8 +118,19 @@ def load_from_config(self, config: dict) -> None: Args: config: Configuration dict + + Raises: + ValueError: If a server configuration is invalid """ + import warnings + for name, server_config in config.items(): + if not isinstance(server_config, dict): + raise ValueError( + f"Server '{name}': configuration must be a dict, " + f"got {type(server_config).__name__}" + ) + if not server_config.get("enabled", True): continue @@ -130,10 +141,23 @@ def load_from_config(self, config: dict) -> None: # Try to find by name if type not found server_class = get_server_class(name) - if server_class is not None: + if server_class is None: + warnings.warn( + f"Server '{name}': unknown type '{server_type}'. " + f"Valid types: embedded, docker, external, radicale, xandikos, " + f"baikal, nextcloud, cyrus, sogo, bedework. 
" + f"Server will be skipped.", + UserWarning, + stacklevel=2, + ) + continue + + try: server_config["name"] = name server = server_class(server_config) self.register(server) + except Exception as e: + raise ValueError(f"Server '{name}': failed to create server instance: {e}") from e def auto_discover(self) -> None: """ @@ -226,15 +250,43 @@ def get_registry() -> ServerRegistry: """ Get the global server registry instance. - Creates the registry on first call and runs auto-discovery. + Creates the registry on first call, runs auto-discovery, and loads + configuration from the config file (if present). Returns: The global ServerRegistry instance + + Raises: + ConfigParseError: If the config file exists but cannot be parsed + ValueError: If the config has invalid server definitions """ global _global_registry if _global_registry is None: _global_registry = ServerRegistry() _global_registry.auto_discover() + + # Load configuration from config file + # Let ConfigParseError and ValueError propagate - these are real errors + from .config_loader import ConfigParseError, load_test_server_config + + try: + config = load_test_server_config() + if config: + _global_registry.load_from_config(config) + except ConfigParseError: + # Re-raise config parse errors - these should fail loudly + raise + except Exception as e: + # Log unexpected errors but don't silently ignore them + import warnings + + warnings.warn( + f"Failed to load test server configuration: {e}. 
" + "Check tests/caldav_test_servers.yaml for errors.", + UserWarning, + stacklevel=2, + ) + return _global_registry diff --git a/tests/test_servers/test_config_loader.py b/tests/test_servers/test_config_loader.py new file mode 100644 index 00000000..1e51ed09 --- /dev/null +++ b/tests/test_servers/test_config_loader.py @@ -0,0 +1,90 @@ +"""Tests for config_loader module.""" + +from pathlib import Path + +import pytest + +from .config_loader import ConfigParseError, load_test_server_config + + +class TestLoadTestServerConfig: + """Tests for load_test_server_config function.""" + + def test_valid_yaml_config(self, tmp_path: Path) -> None: + """Test loading a valid YAML config file.""" + config_file = tmp_path / "test_servers.yaml" + config_file.write_text(""" +test-servers: + radicale: + type: embedded + enabled: true +""") + cfg = load_test_server_config(str(config_file)) + assert "radicale" in cfg + assert cfg["radicale"]["type"] == "embedded" + + def test_valid_json_config(self, tmp_path: Path) -> None: + """Test loading a valid JSON config file.""" + config_file = tmp_path / "test_servers.json" + config_file.write_text('{"test-servers": {"radicale": {"type": "embedded"}}}') + cfg = load_test_server_config(str(config_file)) + assert "radicale" in cfg + assert cfg["radicale"]["type"] == "embedded" + + def test_invalid_yaml_raises_error(self, tmp_path: Path) -> None: + """Test that invalid YAML raises ConfigParseError.""" + config_file = tmp_path / "test_servers.yaml" + config_file.write_text(""" +test-servers: + radicale: + type: embedded + invalid yaml: [unclosed bracket +""") + with pytest.raises(ConfigParseError) as exc_info: + load_test_server_config(str(config_file)) + assert "could not be parsed" in str(exc_info.value) + assert str(config_file) in str(exc_info.value) + + def test_invalid_json_and_yaml_raises_error(self, tmp_path: Path) -> None: + """Test that content invalid as both JSON and YAML raises ConfigParseError.""" + config_file = tmp_path / 
"test_servers.json" + # This is invalid for both JSON and YAML parsers + config_file.write_text("{{{{invalid syntax}}}}") + with pytest.raises(ConfigParseError) as exc_info: + load_test_server_config(str(config_file)) + assert "could not be parsed" in str(exc_info.value) + + def test_nonexistent_explicit_config_falls_back(self, tmp_path: Path, monkeypatch) -> None: + """Test that nonexistent explicit config falls back to default locations.""" + # Temporarily change to a directory without any config files + # and set HOME to temp dir to avoid finding real user configs + monkeypatch.chdir(tmp_path) + monkeypatch.setenv("HOME", str(tmp_path)) + cfg = load_test_server_config("/nonexistent/path/test_servers.yaml") + # Should return empty dict when no config found in any location + assert cfg == {} + + def test_flat_yaml_config(self, tmp_path: Path) -> None: + """Test loading a YAML config without test-servers wrapper.""" + config_file = tmp_path / "test_servers.yaml" + config_file.write_text(""" +radicale: + type: embedded + enabled: true +purelymail: + type: external + enabled: true + features: purelymail +""") + cfg = load_test_server_config(str(config_file)) + assert "radicale" in cfg + assert "purelymail" in cfg + assert cfg["purelymail"]["features"] == "purelymail" + + def test_empty_yaml_raises_error(self, tmp_path: Path) -> None: + """Test that an empty YAML file raises ConfigParseError.""" + config_file = tmp_path / "test_servers.yaml" + config_file.write_text("") + with pytest.raises(ConfigParseError) as exc_info: + load_test_server_config(str(config_file)) + assert "could not be parsed" in str(exc_info.value) diff --git a/tests/test_sync_token_fallback.py b/tests/test_sync_token_fallback.py index 54bff0a9..9a0927dc 100644 --- a/tests/test_sync_token_fallback.py +++ b/tests/test_sync_token_fallback.py @@ -22,6 +22,8 @@ def setup_method(self): self.mock_client = Mock() self.mock_client.features = Mock() self.mock_client.features.is_supported = 
Mock(return_value={}) + # mock_client.url needs to be a real URL for client.url.join() to work + self.mock_client.url = URL("http://example.com/") self.calendar = Calendar(client=self.mock_client, url=URL("http://example.com/calendar/")) diff --git a/tests/tools/convert_conf_private.py b/tests/tools/convert_conf_private.py index 6aaf72a1..d95694a2 100755 --- a/tests/tools/convert_conf_private.py +++ b/tests/tools/convert_conf_private.py @@ -1,21 +1,52 @@ #!/usr/bin/env python3 """ -Convert legacy conf_private.py to new test_servers.yaml format. +Convert legacy conf_private.py to new caldav_test_servers.yaml format. Usage: python tests/tools/convert_conf_private.py [conf_private.py] [output.yaml] If no arguments given, looks for tests/conf_private.py and outputs to -tests/test_servers.yaml. +tests/caldav_test_servers.yaml. The old conf_private.py format is deprecated and will be removed in v3.0. """ import argparse +import stat import sys from pathlib import Path from typing import Any +# Known feature preset names from compatibility_hints +KNOWN_FEATURE_PRESETS = [ + "fastmail", + "posteo", + "purelymail", + "gmx", + "icloud", + "google", + "yahoo", + "zoho", + "mailbox_org", +] + + +def get_feature_preset_name(features: Any) -> str | None: + """Check if features matches a known preset and return its name.""" + try: + from caldav import compatibility_hints + + # Check all dict attributes in compatibility_hints + for name in dir(compatibility_hints): + if name.startswith("_"): + continue + preset = getattr(compatibility_hints, name) + if isinstance(preset, dict) and features is preset: + return f"compatibility_hints.{name}" + except ImportError: + pass + return None + def load_conf_private(path: Path) -> dict[str, Any]: """Load configuration from conf_private.py file.""" @@ -60,9 +91,13 @@ def convert_to_yaml_config(conf_private: Any) -> dict[str, Any]: if old_key in server: config[new_key] = server[old_key] - # Handle features list + # Handle features - check if 
it's a known preset if "features" in server: - config["features"] = server["features"] + preset_name = get_feature_preset_name(server["features"]) + if preset_name: + config["features"] = preset_name + else: + config["features"] = server["features"] servers[key] = config @@ -177,7 +212,9 @@ def format_value(value: Any) -> str: def main(): - parser = argparse.ArgumentParser(description="Convert conf_private.py to test_servers.yaml") + parser = argparse.ArgumentParser( + description="Convert conf_private.py to caldav_test_servers.yaml" + ) parser.add_argument( "input", nargs="?", @@ -187,8 +224,8 @@ def main(): parser.add_argument( "output", nargs="?", - default="tests/test_servers.yaml", - help="Output YAML file (default: tests/test_servers.yaml)", + default="tests/caldav_test_servers.yaml", + help="Output YAML file (default: tests/caldav_test_servers.yaml)", ) parser.add_argument( "--dry-run", @@ -207,7 +244,10 @@ def main(): "\nIf you don't have a conf_private.py, copy the example instead:", file=sys.stderr, ) - print(" cp tests/test_servers.yaml.example tests/test_servers.yaml", file=sys.stderr) + print( + " cp tests/caldav_test_servers.yaml.example tests/caldav_test_servers.yaml", + file=sys.stderr, + ) sys.exit(1) print(f"Loading {input_path}...") @@ -230,7 +270,9 @@ def main(): print(yaml_content) else: output_path.write_text(yaml_content) - print(f"Written to {output_path}") + # Set restrictive permissions since file may contain passwords + output_path.chmod(stat.S_IRUSR | stat.S_IWUSR) # 0600 + print(f"Written to {output_path} (mode 0600)") print(f"\nYou can now delete {input_path} (it's deprecated and will be ignored)")