diff --git a/docs/dependency_groups.rst b/docs/dependency_groups.rst
new file mode 100644
index 00000000..fac45af8
--- /dev/null
+++ b/docs/dependency_groups.rst
@@ -0,0 +1,77 @@
+Dependency Groups
+=================
+
+.. currentmodule:: packaging.dependency_groups
+
+Package data as defined in ``pyproject.toml`` may include lists of dependencies
+in named groups. This is described by the
+:ref:`dependency groups specification <dependency-groups>`, which defines
+the ``[dependency-groups]`` table.
+
+This module provides tools for resolving group names to lists of requirements,
+most notably expanding ``include-group`` directives.
+
+Usage
+-----
+
+Two primary interfaces are offered. An object-based one which caches results and
+provides ``Requirements`` as its results:
+
+.. doctest::
+
+    >>> from packaging.dependency_groups import DependencyGroupResolver
+    >>> coverage = ["coverage"]
+    >>> test = ["pytest", {"include-group": "coverage"}]
+    >>> # A resolver is defined on a mapping of group names to group data, as
+    >>> # you might get by loading the [dependency-groups] TOML table.
+    >>> resolver = DependencyGroupResolver({"test": test, "coverage": coverage})
+    >>> # resolvers support expanding group names to Requirements
+    >>> resolver.resolve("coverage")
+    (<Requirement('coverage')>,)
+    >>> resolver.resolve("test")
+    (<Requirement('pytest')>, <Requirement('coverage')>)
+    >>> # resolvers can also be used to lookup the dependency groups without
+    >>> # expanding includes
+    >>> resolver.lookup("test")
+    (<Requirement('pytest')>, DependencyGroupInclude('coverage'))
+
+And a simpler functional interface which responds with strings:
+
+.. doctest::
+
+    >>> from packaging.dependency_groups import resolve_dependency_groups
+    >>> coverage = ["coverage"]
+    >>> test = ["pytest", {"include-group": "coverage"}]
+    >>> groups = {"test": test, "coverage": coverage}
+    >>> resolve_dependency_groups(groups, "test")
+    ('pytest', 'coverage')
+
+Reference
+---------
+
+Functional Interface
+''''''''''''''''''''
+
+.. autofunction:: resolve_dependency_groups
+
+
+Object Model Interface
+''''''''''''''''''''''
+
+.. autoclass:: DependencyGroupInclude
+   :members:
+
+.. autoclass:: DependencyGroupResolver
+   :members:
+
+Exceptions
+''''''''''
+
+.. autoclass:: DuplicateGroupNames
+   :members:
+
+.. autoclass:: CyclicDependencyGroup
+   :members:
+
+.. autoclass:: InvalidDependencyGroupObject
+   :members:
diff --git a/docs/index.rst b/docs/index.rst
index cc9dca61..ac418995 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -31,6 +31,7 @@ The ``packaging`` library uses calendar-based versioning (``YY.N``).
     metadata
     tags
     pylock
+    dependency_groups
     utils
 
 .. toctree::
diff --git a/src/packaging/dependency_groups.py b/src/packaging/dependency_groups.py
new file mode 100644
index 00000000..413e5cb4
--- /dev/null
+++ b/src/packaging/dependency_groups.py
@@ -0,0 +1,302 @@
+# Resolution of PEP 735 ``[dependency-groups]`` tables: expands
+# ``include-group`` directives into flat lists of requirements.
+from __future__ import annotations
+
+import re
+from collections.abc import Mapping, Sequence
+
+from .errors import _ErrorCollector
+from .requirements import Requirement
+
+__all__ = [
+    "CyclicDependencyGroup",
+    "DependencyGroupInclude",
+    "DependencyGroupResolver",
+    "DuplicateGroupNames",
+    "InvalidDependencyGroupObject",
+    "resolve_dependency_groups",
+]
+
+
+# keep dir(module) aligned with the declared public API
+def __dir__() -> list[str]:
+    return __all__
+
+
+# -----------
+# Error Types
+# -----------
+
+
+class DuplicateGroupNames(ValueError):
+    """
+    The same dependency groups were defined twice, with different non-normalized names.
+    """
+
+
+class CyclicDependencyGroup(ValueError):
+    """
+    The dependency group includes form a cycle.
+    """
+
+    def __init__(self, requested_group: str, group: str, include_group: str) -> None:
+        self.requested_group = requested_group
+        self.group = group
+        self.include_group = include_group
+
+        # self-includes get a shorter, clearer message than two-node cycles
+        if include_group == group:
+            reason = f"{group} includes itself"
+        else:
+            reason = f"{include_group} -> {group}, {group} -> {include_group}"
+        super().__init__(
+            "Cyclic dependency group include while resolving "
+            f"{requested_group}: {reason}"
+        )
+
+
+# in the PEP 735 spec, the tables in dependency group lists were described as
+# "Dependency Object Specifiers", but the only defined type of object was a
+# "Dependency Group Include" -- hence the naming of this error as "Object"
+class InvalidDependencyGroupObject(ValueError):
+    """
+    A member of a dependency group was identified as a dict, but was not in a valid
+    format.
+    """
+
+
+# ------------------------
+# Object Model & Interface
+# ------------------------
+
+
+class DependencyGroupInclude:
+    # single-attribute value object; __slots__ avoids a per-instance __dict__
+    __slots__ = ("include_group",)
+
+    def __init__(self, include_group: str) -> None:
+        """
+        Initialize a DependencyGroupInclude.
+
+        :param include_group: The name of the group referred to by this include.
+        """
+        self.include_group = include_group
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({self.include_group!r})"
+
+
+class DependencyGroupResolver:
+    """
+    A resolver for Dependency Group data.
+
+    This class handles caching, name normalization, cycle detection, and other
+    parsing requirements. There are only two public methods for exploring the data:
+    ``lookup()`` and ``resolve()``.
+
+    :param dependency_groups: A mapping, as provided via pyproject
+        ``[dependency-groups]``.
+    """
+
+    def __init__(
+        self,
+        dependency_groups: Mapping[str, Sequence[str | Mapping[str, str]]],
+    ) -> None:
+        errors = _ErrorCollector()
+
+        self.dependency_groups = _normalize_group_names(dependency_groups, errors)
+
+        # a map of group names to parsed data
+        self._parsed_groups: dict[
+            str, tuple[Requirement | DependencyGroupInclude, ...]
+        ] = {}
+        # a map of group names to their ancestors, used for cycle detection
+        self._include_graph_ancestors: dict[str, tuple[str, ...]] = {}
+        # a cache of completed resolutions to Requirement lists
+        self._resolve_cache: dict[str, tuple[Requirement, ...]] = {}
+
+        # raises (as a group) any DuplicateGroupNames collected above
+        errors.finalize("[dependency-groups] data was invalid")
+
+    def lookup(self, group: str) -> tuple[Requirement | DependencyGroupInclude, ...]:
+        """
+        Lookup a group name, returning the parsed dependency data for that group.
+        This will not resolve includes.
+
+        :param group: the name of the group to lookup
+        """
+        group = _normalize_name(group)
+
+        with _ErrorCollector().on_exit(
+            f"[dependency-groups] data for {group!r} was malformed"
+        ) as errors:
+            return self._parse_group(group, errors)
+
+    def resolve(self, group: str) -> tuple[Requirement, ...]:
+        """
+        Resolve a dependency group to a list of requirements.
+
+        :param group: the name of the group to resolve
+        """
+        group = _normalize_name(group)
+
+        with _ErrorCollector().on_exit(
+            f"[dependency-groups] data for {group!r} was malformed"
+        ) as errors:
+            return self._resolve(group, group, errors)
+
+    def _resolve(
+        self, group: str, requested_group: str, errors: _ErrorCollector
+    ) -> tuple[Requirement, ...]:
+        """
+        This is a helper for cached resolution to strings. It preserves the name of the
+        group which the user initially requested in order to present a clearer error in
+        the event that a cycle is detected.
+
+        :param group: The normalized name of the group to resolve.
+        :param requested_group: The group which was used in the original, user-facing
+            request.
+        """
+        if group in self._resolve_cache:
+            return self._resolve_cache[group]
+
+        parsed = self._parse_group(group, errors)
+
+        resolved_group = []
+
+        for item in parsed:
+            if isinstance(item, Requirement):
+                resolved_group.append(item)
+            elif isinstance(item, DependencyGroupInclude):
+                include_group = _normalize_name(item.include_group)
+
+                # if a group is cyclic, record the error
+                # otherwise, follow the include_group reference
+                #
+                # this allows us to examine all includes in a group, even in the
+                # presence of errors
+                if include_group in self._include_graph_ancestors.get(group, ()):
+                    errors.error(
+                        CyclicDependencyGroup(
+                            requested_group, group, item.include_group
+                        )
+                    )
+                else:
+                    self._include_graph_ancestors[include_group] = (
+                        *self._include_graph_ancestors.get(group, ()),
+                        group,
+                    )
+                    resolved_group.extend(
+                        self._resolve(include_group, requested_group, errors)
+                    )
+            else:  # pragma: no cover
+                raise NotImplementedError(
+                    f"Invalid dependency group item after parse: {item}"
+                )
+
+        # in the event that errors were detected, present the group as empty and do not
+        # cache the result
+        # this ensures that repeated access to a cyclic group will raise multiple errors
+        if errors.errors:
+            return ()
+
+        self._resolve_cache[group] = tuple(resolved_group)
+        return self._resolve_cache[group]
+
+    def _parse_group(
+        self, group: str, errors: _ErrorCollector
+    ) -> tuple[Requirement | DependencyGroupInclude, ...]:
+        # short circuit -- never do the work twice
+        if group in self._parsed_groups:
+            return self._parsed_groups[group]
+
+        if group not in self.dependency_groups:
+            errors.error(LookupError(f"Dependency group '{group}' not found"))
+            return ()
+
+        raw_group = self.dependency_groups[group]
+        # a str is itself a Sequence[str], so reject it explicitly before the
+        # generic sequence check below
+        if isinstance(raw_group, str):
+            errors.error(
+                TypeError(
+                    f"Dependency group {group!r} contained a string rather than a list."
+                )
+            )
+            return ()
+
+        if not isinstance(raw_group, Sequence):
+            errors.error(
+                TypeError(f"Dependency group {group!r} is not a sequence type.")
+            )
+            return ()
+
+        elements: list[Requirement | DependencyGroupInclude] = []
+        for item in raw_group:
+            if isinstance(item, str):
+                # packaging.requirements.Requirement parsing ensures that this is a
+                # valid PEP 508 Dependency Specifier
+                # raises InvalidRequirement on failure
+                elements.append(Requirement(item))
+            elif isinstance(item, Mapping):
+                # the only valid mapping shape is exactly one "include-group" key
+                if tuple(item.keys()) != ("include-group",):
+                    errors.error(
+                        InvalidDependencyGroupObject(
+                            f"Invalid dependency group item: {item!r}"
+                        )
+                    )
+                else:
+                    include_group = item["include-group"]
+                    elements.append(DependencyGroupInclude(include_group=include_group))
+            else:
+                errors.error(TypeError(f"Invalid dependency group item: {item!r}"))
+
+        self._parsed_groups[group] = tuple(elements)
+        return self._parsed_groups[group]
+
+
+# --------------------
+# Functional Interface
+# --------------------
+
+
+def resolve_dependency_groups(
+    dependency_groups: Mapping[str, Sequence[str | Mapping[str, str]]], /, *groups: str
+) -> tuple[str, ...]:
+    """
+    Resolve a dependency group to a tuple of requirements, as strings.
+
+    :param dependency_groups: the parsed contents of the ``[dependency-groups]`` table
+        from ``pyproject.toml``
+    :param groups: the name of the group(s) to resolve
+    """
+    resolver = DependencyGroupResolver(dependency_groups)
+    return tuple(str(r) for group in groups for r in resolver.resolve(group))
+
+
+# ----------------
+# internal helpers
+# ----------------
+
+
+# package-name-style normalization: runs of "-", "_", "." collapse to a
+# single "-" and the result is lowercased
+_NORMALIZE_PATTERN = re.compile(r"[-_.]+")
+
+
+def _normalize_name(name: str) -> str:
+    return _NORMALIZE_PATTERN.sub("-", name).lower()
+
+
+def _normalize_group_names(
+    dependency_groups: Mapping[str, Sequence[str | Mapping[str, str]]],
+    errors: _ErrorCollector,
+) -> dict[str, Sequence[str | Mapping[str, str]]]:
+    # rewrites the table to use normalized keys; collects a DuplicateGroupNames
+    # error for every normalized name declared more than once
+    original_names: dict[str, list[str]] = {}
+    normalized_groups: dict[str, Sequence[str | Mapping[str, str]]] = {}
+
+    for group_name, value in dependency_groups.items():
+        normed_group_name = _normalize_name(group_name)
+        original_names.setdefault(normed_group_name, []).append(group_name)
+        normalized_groups[normed_group_name] = value
+
+    for normed_name, names in original_names.items():
+        if len(names) > 1:
+            errors.error(
+                DuplicateGroupNames(
+                    "Duplicate dependency group names: "
+                    f"{normed_name} ({', '.join(names)})"
+                )
+            )
+
+    return normalized_groups
diff --git a/tests/test_dependency_groups.py b/tests/test_dependency_groups.py
new file mode 100644
index 00000000..de8f6caf
--- /dev/null
+++ b/tests/test_dependency_groups.py
@@ -0,0 +1,489 @@
+from __future__ import annotations
+
+import re
+import sys
+import unittest.mock
+from typing import Any
+
+import pytest
+
+from packaging.dependency_groups import (
+    CyclicDependencyGroup,
+    DependencyGroupInclude,
+    DependencyGroupResolver,
+    DuplicateGroupNames,
+    InvalidDependencyGroupObject,
+    resolve_dependency_groups,
+)
+from packaging.errors import ExceptionGroup
+from packaging.requirements import Requirement
+
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+# mirrors the shape of a parsed [dependency-groups] TOML table
+GroupsTable: TypeAlias = "dict[str, list[str | dict[str, str]]]"
+
+
+def _group_contains(
+    excinfo: pytest.ExceptionInfo[ExceptionGroup],
+    exc_type: type[BaseException],
+    *,
+    match: str | re.Pattern[str] | None = None,
+) -> bool:
+    """
+    pytest.raises().group_contains() cannot be used on ExceptionGroup
+    because it doesn't inherit from `exceptiongroup.BaseExceptionGroup` on
+    python versions < 3.11 .
+
+    This is a similar helper, just for these tests.
+    """
+    exc_group = excinfo.value
+    assert isinstance(exc_group, ExceptionGroup)
+
+    for exc in exc_group.exceptions:
+        if not isinstance(exc, exc_type):
+            continue
+        if match is not None and not re.search(match, str(exc)):
+            continue
+        return True
+
+    return False
+
+
+def test_resolver_init_catches_normalization_conflict() -> None:
+    groups: GroupsTable = {"test": ["pytest"], "Test": ["pytest", "coverage"]}
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data was invalid"
+    ) as excinfo:
+        DependencyGroupResolver(groups)
+
+    assert _group_contains(
+        excinfo, DuplicateGroupNames, match="Duplicate dependency group names"
+    )
+
+
+def test_lookup_on_trivial_normalization() -> None:
+    groups: GroupsTable = {"test": ["pytest"]}
+    resolver = DependencyGroupResolver(groups)
+    parsed_group = resolver.lookup("Test")
+    assert len(parsed_group) == 1
+    assert isinstance(parsed_group[0], Requirement)
+    req = parsed_group[0]
+    assert req.name == "pytest"
+
+
+def test_lookup_with_include_result() -> None:
+    groups: GroupsTable = {
+        "test": ["pytest", {"include-group": "runtime"}],
+        "runtime": ["click"],
+    }
+    resolver = DependencyGroupResolver(groups)
+    parsed_group = resolver.lookup("test")
+    assert len(parsed_group) == 2
+
+    assert isinstance(parsed_group[0], Requirement)
+    assert parsed_group[0].name == "pytest"
+
+    assert isinstance(parsed_group[1], DependencyGroupInclude)
+    assert parsed_group[1].include_group == "runtime"
+
+
+def test_lookup_does_not_trigger_cyclic_include() -> None:
+    groups: GroupsTable = {
+        "group1": [{"include-group": "group2"}],
+        "group2": [{"include-group": "group1"}],
+    }
+    resolver = DependencyGroupResolver(groups)
+    parsed_group = resolver.lookup("group1")
+    assert len(parsed_group) == 1
+
+    assert isinstance(parsed_group[0], DependencyGroupInclude)
+    assert parsed_group[0].include_group == "group2"
+
+
+def test_expand_contract_model_only_does_inner_lookup_once() -> None:
+    groups: GroupsTable = {
+        "root": [
+            {"include-group": "mid1"},
+            {"include-group": "mid2"},
+            {"include-group": "mid3"},
+            {"include-group": "mid4"},
+        ],
+        "mid1": [{"include-group": "contract"}],
+        "mid2": [{"include-group": "contract"}],
+        "mid3": [{"include-group": "contract"}],
+        "mid4": [{"include-group": "contract"}],
+        "contract": [{"include-group": "leaf"}],
+        "leaf": ["attrs"],
+    }
+    resolver = DependencyGroupResolver(groups)
+
+    real_inner_resolve = resolver._resolve
+    with unittest.mock.patch(
+        "packaging.dependency_groups.DependencyGroupResolver._resolve",
+        side_effect=real_inner_resolve,
+    ) as spy:
+        resolved = resolver.resolve("root")
+        assert len(resolved) == 4
+        assert all(item.name == "attrs" for item in resolved)
+
+        # each of the `mid` nodes will call resolution with `contract`, but only the
+        # first of those evaluations should call for resolution of `leaf` -- after that,
+        # `contract` will be in the cache and `leaf` will not need to be resolved
+        spy.assert_any_call("leaf", "root", unittest.mock.ANY)
+        leaf_calls = [c for c in spy.mock_calls if c.args[0] == "leaf"]
+        assert len(leaf_calls) == 1
+
+
+def test_no_double_parse() -> None:
+    groups: GroupsTable = {
+        "test": [{"include-group": "runtime"}],
+        "runtime": ["click"],
+    }
+    resolver = DependencyGroupResolver(groups)
+
+    parse = resolver.lookup("test")
+    assert len(parse) == 1
+    assert isinstance(parse[0], DependencyGroupInclude)
+    assert parse[0].include_group == "runtime"
+
+    mock_include = DependencyGroupInclude(include_group="perfidy")
+
+    with unittest.mock.patch(
+        "packaging.dependency_groups.DependencyGroupInclude",
+        return_value=mock_include,
+    ):
+        # rerunning with that resolver will not re-resolve
+        reparse = resolver.lookup("test")
+        assert len(reparse) == 1
+        assert isinstance(reparse[0], DependencyGroupInclude)
+        assert reparse[0].include_group == "runtime"
+
+        # but verify that a fresh resolver (no cache) will get the mock
+        deceived_resolver = DependencyGroupResolver(groups)
+        deceived_parse = deceived_resolver.lookup("test")
+        assert len(deceived_parse) == 1
+        assert isinstance(deceived_parse[0], DependencyGroupInclude)
+        assert deceived_parse[0].include_group == "perfidy"
+
+
+@pytest.mark.parametrize("group_name_declared", ["foo-bar", "foo_bar", "foo..bar"])
+@pytest.mark.parametrize("group_name_used", ["foo-bar", "foo_bar", "foo..bar"])
+def test_normalized_name_is_used_for_include_group_lookups(
+    group_name_declared: str, group_name_used: str
+) -> None:
+    groups: GroupsTable = {
+        group_name_declared: ["spam"],
+        "eggs": [{"include-group": group_name_used}],
+    }
+    resolver = DependencyGroupResolver(groups)
+
+    result = resolver.resolve("eggs")
+    assert len(result) == 1
+    assert isinstance(result[0], Requirement)
+    req = result[0]
+    assert req.name == "spam"
+
+
+def test_empty_group() -> None:
+    groups: GroupsTable = {"test": []}
+    assert resolve_dependency_groups(groups, "test") == ()
+
+
+def test_str_list_group() -> None:
+    groups: GroupsTable = {"test": ["pytest"]}
+    assert resolve_dependency_groups(groups, "test") == ("pytest",)
+
+
+def test_single_include_group() -> None:
+    groups: GroupsTable = {
+        "test": [
+            "pytest",
+            {"include-group": "runtime"},
+        ],
+        "runtime": ["sqlalchemy"],
+    }
+    assert set(resolve_dependency_groups(groups, "test")) == {"pytest", "sqlalchemy"}
+
+
+# NOTE(review): "sdual" looks like a typo for "dual" -- confirm before renaming
+def test_sdual_include_group() -> None:
+    groups: GroupsTable = {
+        "test": [
+            "pytest",
+        ],
+        "runtime": ["sqlalchemy"],
+    }
+    assert set(resolve_dependency_groups(groups, "test", "runtime")) == {
+        "pytest",
+        "sqlalchemy",
+    }
+
+
+def test_normalized_group_name() -> None:
+    groups: GroupsTable = {
+        "TEST": ["pytest"],
+    }
+    assert resolve_dependency_groups(groups, "test") == ("pytest",)
+
+
+def test_no_such_group_name() -> None:
+    groups: GroupsTable = {
+        "test": ["pytest"],
+    }
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'testing' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "testing")
+
+    assert _group_contains(excinfo, LookupError, match="'testing' not found")
+
+
+def test_duplicate_normalized_name() -> None:
+    groups: GroupsTable = {
+        "test": ["pytest"],
+        "TEST": ["nose2"],
+    }
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data was invalid"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "test")
+
+    assert _group_contains(
+        excinfo,
+        DuplicateGroupNames,
+        match=r"Duplicate dependency group names: test \((test, TEST)|(TEST, test)\)",
+    )
+
+
+def test_cyclic_include() -> None:
+    groups: GroupsTable = {
+        "group1": [
+            {"include-group": "group2"},
+        ],
+        "group2": [
+            {"include-group": "group1"},
+        ],
+    }
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'group1' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "group1")
+
+    assert _group_contains(
+        excinfo,
+        CyclicDependencyGroup,
+        match=(
+            "Cyclic dependency group include while resolving group1: "
+            "group1 -> group2, group2 -> group1"
+        ),
+    )
+
+
+def test_cyclic_include_many_steps() -> None:
+    groups: GroupsTable = {}
+    for i in range(100):
+        groups[f"group{i}"] = [{"include-group": f"group{i + 1}"}]
+    groups["group100"] = [{"include-group": "group0"}]
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'group0' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "group0")
+
+    assert _group_contains(
+        excinfo,
+        CyclicDependencyGroup,
+        match="Cyclic dependency group include while resolving group0: ",
+    )
+
+
+def test_cyclic_include_self() -> None:
+    groups: GroupsTable = {
+        "group1": [
+            {"include-group": "group1"},
+        ],
+    }
+
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'group1' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "group1")
+
+    assert _group_contains(
+        excinfo,
+        CyclicDependencyGroup,
+        match=(
+            "Cyclic dependency group include while resolving group1: "
+            "group1 includes itself"
+        ),
+    )
+
+
+def test_cyclic_include_ring_under_root() -> None:
+    groups: GroupsTable = {
+        "root": [
+            {"include-group": "group1"},
+        ],
+        "group1": [
+            {"include-group": "group2"},
+        ],
+        "group2": [
+            {"include-group": "group1"},
+        ],
+    }
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'root' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "root")
+
+    assert _group_contains(
+        excinfo,
+        CyclicDependencyGroup,
+        match=(
+            "Cyclic dependency group include while resolving root: "
+            "group1 -> group2, group2 -> group1"
+        ),
+    )
+
+
+# each access to a cyclic group should raise an error
+def test_cyclic_include_accessed_repeatedly_on_resolver_instance() -> None:
+    groups: GroupsTable = {
+        "group1": [
+            {"include-group": "group2"},
+        ],
+        "group2": [
+            {"include-group": "group1"},
+        ],
+    }
+    resolver = DependencyGroupResolver(groups)
+
+    # each access raises an exception group of the same shape
+    for _ in range(3):
+        with pytest.raises(
+            ExceptionGroup,
+            match=r"\[dependency-groups\] data for 'group1' was malformed",
+        ) as excinfo:
+            resolver.resolve("group1")
+        assert _group_contains(
+            excinfo,
+            CyclicDependencyGroup,
+            match=(
+                "Cyclic dependency group include while resolving group1: "
+                "group1 -> group2, group2 -> group1"
+            ),
+        )
+
+
+# a string is a Sequence[str] but is explicitly checked and rejected
+def test_non_str_data() -> None:
+    groups: Any = {"test": "pytest, coverage"}
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'test' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "test")
+
+    assert _group_contains(
+        excinfo,
+        TypeError,
+        match=r"Dependency group 'test' contained a string rather than a list.",
+    )
+
+
+def test_non_list_data() -> None:
+    groups: Any = {"test": 101}
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'test' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "test")
+
+    assert _group_contains(
+        excinfo, TypeError, match=r"Dependency group 'test' is not a sequence type."
+    )
+
+
+@pytest.mark.parametrize(
+    "item",
+    [
+        {},
+        {"foo": "bar"},
+        {"include-group": "testing", "foo": "bar"},
+    ],
+)
+def test_unknown_object_shape(item: dict[str, str] | object) -> None:
+    groups: Any = {"test": [item]}
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'test' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "test")
+
+    assert _group_contains(
+        excinfo, InvalidDependencyGroupObject, match="Invalid dependency group item:"
+    )
+
+
+def test_non_unexpected_item_type() -> None:
+    groups: Any = {"test": [object()]}
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'test' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "test")
+
+    assert _group_contains(excinfo, TypeError, match="Invalid dependency group item")
+
+
+def test_dependency_group_include_repr() -> None:
+    include = DependencyGroupInclude("test")
+    assert repr(include) == "DependencyGroupInclude('test')"
+
+
+def test_resolution_can_capture_multiple_errors_at_once() -> None:
+    groups: Any = {
+        "all": [
+            {"include-group": "all-invalid"},
+            {"include-group": "all-valid"},
+        ],
+        "all-valid": [
+            {"include-group": "empty"},
+            {"include-group": "simple"},
+        ],
+        "all-invalid": [
+            {"include-group": "self-reference"},
+            {"include-group": "invalid-object"},
+            {"include-group": "invalid-type"},
+            {"include-group": "invalid-type"},
+        ],
+        "self-reference": [{"include-group": "self-reference"}],
+        "invalid-object": [{}],
+        "invalid-type": "foo",
+        "empty": [],
+        "simple": ["jsonschema<5"],
+    }
+
+    # sanity check: even in the presence of these invalid data, we can extract the valid
+    # parts
+    valid_resolution = resolve_dependency_groups(groups, "all-valid")
+    assert len(valid_resolution) == 1
+    assert valid_resolution[0] == "jsonschema<5"
+
+    # however, resolving everything triggers *multiple* errors, from the various
+    # incorrect pieces of data, collected in an exception group
+    with pytest.raises(
+        ExceptionGroup, match=r"\[dependency-groups\] data for 'all' was malformed"
+    ) as excinfo:
+        resolve_dependency_groups(groups, "all")
+
+    assert _group_contains(
+        excinfo,
+        CyclicDependencyGroup,
+        match=(
+            "Cyclic dependency group include while resolving all: "
+            "self-reference includes itself"
+        ),
+    )
+    assert _group_contains(
+        excinfo,
+        TypeError,
+        match=r"Dependency group 'invalid-type' contained a string rather than a list.",
+    )