diff --git a/.cspell.json b/.cspell.json
index c5805be3..bae32ec1 100644
--- a/.cspell.json
+++ b/.cspell.json
@@ -24,6 +24,7 @@
     "*.ico",
     ".editorconfig",
     ".envrc",
+    ".github/dependabot.yml",
     ".gitignore",
     ".gitpod.*",
     ".pre-commit-config.yaml",
@@ -39,7 +40,6 @@
   ],
   "ignoreWords": [
     "FURB",
-    "MAINT",
     "PyPI",
     "addopts",
     "argparse",
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 90891ae8..c658ead2 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -1,16 +1,14 @@
 version: 2
+
+multi-ecosystem-groups:
+  lock:
+    assignees: [redeboer]
+    commit-message: { prefix: MAINT }
+    schedule: { interval: quarterly }
+
 updates:
   - package-ecosystem: github-actions
     directory: "/"
-    assignees:
-      - redeboer
-    commit-message:
-      prefix: MAINT
-    groups:
-      actions:
-        patterns:
-          - "*"
-    labels:
-      - ⬆️ Lock
-    schedule:
-      interval: monthly
+    labels: [⬆️ Lock]
+    multi-ecosystem-group: lock
+    patterns: ["*"]
diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml
index 048bf296..5f74ee76 100644
--- a/.github/workflows/cd.yml
+++ b/.github/workflows/cd.yml
@@ -9,8 +9,8 @@ on:
 jobs:
   milestone:
     if: startsWith(github.ref, 'refs/tags')
-    uses: ComPWA/actions/.github/workflows/close-milestone.yml@v3
+    uses: ComPWA/actions/.github/workflows/close-milestone.yml@v4
   push:
     if: startsWith(github.ref, 'refs/tags') && !github.event.release.prerelease
     secrets: inherit
-    uses: ComPWA/actions/.github/workflows/push-to-version-branches.yml@v3
+    uses: ComPWA/actions/.github/workflows/push-to-version-branches.yml@v4
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 50a30498..aede715b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,32 +17,24 @@ on:
       - epic/*
       - "[0-9]+.[0-9]+.x"
   workflow_dispatch:
-    inputs:
-      specific-pip-packages:
-        description: Run CI with specific pip packages
-        required: false
-        type: string
 
 jobs:
   doc:
-    uses: ComPWA/actions/.github/workflows/ci-docs.yml@v3.0
+    uses: ComPWA/actions/.github/workflows/ci-docs.yml@v4.0
     permissions:
       pages: write
       id-token: write
     with:
       gh-pages: true
-      specific-pip-packages: ${{ inputs.specific-pip-packages }}
-  pytest:
-    uses: ComPWA/actions/.github/workflows/pytest.yml@v3.0
-    with:
-      coverage-target: compwa_policy
-      macos-python-version: "3.10"
-      multithreaded: false
-      specific-pip-packages: ${{ inputs.specific-pip-packages }}
-    secrets:
-      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
   style:
-    if: inputs.specific-pip-packages == ''
     secrets:
       token: ${{ secrets.PAT }}
-    uses: ComPWA/actions/.github/workflows/pre-commit.yml@v3.0
+    uses: ComPWA/actions/.github/workflows/pre-commit.yml@v4.0
+  test:
+    uses: ComPWA/actions/.github/workflows/test.yml@v4.0
+    secrets:
+      CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+    with:
+      coverage-python-version: "3.13"
+      macos-python-version: "3.10"
+      multithreaded: false
diff --git a/.github/workflows/clean-caches.yml b/.github/workflows/clean-caches.yml
index 2de0c959..b241bf56 100644
--- a/.github/workflows/clean-caches.yml
+++ b/.github/workflows/clean-caches.yml
@@ -16,7 +16,7 @@ jobs:
     name: Remove caches
     runs-on: ubuntu-24.04
     steps:
-      - uses: ComPWA/actions/clean-caches@v3
+      - uses: ComPWA/actions/clean-caches@v4
         with:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           ref: ${{ inputs.ref }}
diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml
index 4217fe2f..ed00e508 100644
--- a/.github/workflows/lock.yml
+++ b/.github/workflows/lock.yml
@@ -16,6 +16,6 @@ on:
 
 jobs:
   lock:
-    uses: ComPWA/actions/.github/workflows/lock.yml@v3
+    uses: ComPWA/actions/.github/workflows/lock.yml@v4
     secrets:
       token: ${{ secrets.PAT }}
diff --git a/.github/workflows/pr-linting.yml b/.github/workflows/pr-linting.yml
index 638dfc1e..e15b56f9 100644
--- a/.github/workflows/pr-linting.yml
+++ b/.github/workflows/pr-linting.yml
@@ -11,4 +11,4 @@ on:
 
 jobs:
   lint-pr:
-    uses: ComPWA/actions/.github/workflows/pr-linting.yml@v3
+    uses: ComPWA/actions/.github/workflows/pr-linting.yml@v4
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c1a1e958..f1482697 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -14,7 +14,7 @@ repos:
       - id: check-useless-excludes
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.11
+    rev: v0.14.13
    hooks:
      - id: ruff-check
        args: [--fix]
@@ -49,7 +49,6 @@ repos:
        pass_filenames: false
        args:
          - --allow-labels
-          - --dependabot=update
          - --keep-local-precommit
          - --no-pypi
          - --pytest-single-threaded
@@ -64,7 +63,7 @@ repos:
        files: ^\.pre\-commit\-(config|hooks)\.yaml$
 
  - repo: https://github.com/ComPWA/prettier-pre-commit
-    rev: v3.4.2
+    rev: v3.8.0
    hooks:
      - id: prettier
 
diff --git a/.vscode/settings.json b/.vscode/settings.json
index ac45e6f0..38540f29 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -42,7 +42,6 @@
   },
   "notebook.formatOnSave.enabled": true,
   "python.defaultInterpreterPath": ".venv/bin/python",
-  "python.languageServer": "None",
   "python.terminal.activateEnvironment": false,
   "python.testing.pytestEnabled": true,
   "python.testing.unittestEnabled": false,
diff --git a/docs/conf.py b/docs/conf.py
index 8041b25c..323321da 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -13,11 +13,8 @@
 api_target_substitutions: dict[str, str | tuple[str, str]] = {
     "Array": "tomlkit.items.Array",
     "ConfigParser": "configparser.ConfigParser",
-    "DependabotOption": (
-        "obj",
-        "compwa_policy.check_dev_files.dependabot.DependabotOption",
-    ),
     "Frequency": "typing.Literal",
+    "InlineTable": "tomlkit.items.InlineTable",
     "IO": "typing.IO",
     "Iterable": "typing.Iterable",
     "K": "typing.TypeVar",
@@ -39,8 +36,13 @@
     "T": "typing.TypeVar",
     "Table": "tomlkit.items.Table",
     "TOMLDocument": "tomlkit.TOMLDocument",
+    "ty.TypeChecker": ("obj", "compwa_policy.check_dev_files.ty.TypeChecker"),
     "TypeChecker": ("obj", "compwa_policy.check_dev_files.ty.TypeChecker"),
     "typing_extensions.NotRequired": ("obj", "typing.NotRequired"),
+    "upgrade_lock.Frequency": (
+        "obj",
+        "compwa_policy.check_dev_files.upgrade_lock.Frequency",
+    ),
     "V": "typing.TypeVar",
 }
 author = "Common Partial Wave Analysis"
diff --git a/pyproject.toml b/pyproject.toml
index dd490833..fd100447 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,6 +26,7 @@ dependencies = [
    "pip-tools",
    "rtoml", # fast, read-only parsing
    "ruamel.yaml", # better YAML dumping
+    "setuptools",
    "tomlkit", # preserve original TOML formatting
    'more-itertools; python_version <"3.10.0"', # pairwise
    'typing-extensions; python_version <"3.12.0"', # override
@@ -58,7 +59,6 @@ Tracker = "https://github.com/ComPWA/policy/issues"
 dev = [
    "labels",
    "ruff",
-    "sphinx-autobuild",
    {include-group = "doc"},
    {include-group = "style"},
    {include-group = "test"},
@@ -130,7 +130,8 @@ no-group = "dev"
 type = "uv"
 
 [tool.poe.tasks.all]
-help = "Run all continuous integration tasks locally"
+help = "Run all continuous integration (CI) tasks locally"
+ignore_fail = "return_non_zero"
 sequence = [
    "cov",
    "doc",
@@ -141,13 +142,13 @@ sequence = [
 
 [tool.poe.tasks.cov]
 cmd = """
-uv run --group test --isolated --no-dev \
-  pytest \
-    --cov-fail-under=32 \
-    --cov-report=html \
-    --cov-report=xml \
-    ${paths}
+pytest \
+  --cov-fail-under=32 \
+  --cov-report=html \
+  --cov-report=xml \
+  ${paths}
 """
+executor = {group = "test"}
 help = "Compute how much of the source code is covered by tests"
 
 [[tool.poe.tasks.cov.args]]
@@ -188,8 +189,8 @@ sphinx-autobuild \
  docs/ \
  docs/_build/html/
 """
-executor = {group = "doc"}
-help = "Run all unit tests"
+executor = {group = "doc", with = "sphinx-autobuild"}
+help = "Set up a server to directly preview changes to the HTML pages"
 
 [tool.poe.tasks.linkcheck]
 cmd = """
@@ -221,31 +222,40 @@ positional = true
 [tool.poe.tasks.test-all]
 help = "Run all tests on each supported Python version"
 sequence = [
-    {ref = "test-py 3.10"},
-    {ref = "test-py 3.11"},
-    {ref = "test-py 3.12"},
-    {ref = "test-py 3.13"},
-    {ref = "test-py 3.14"},
+    {ref = "test-py310 ${paths}"},
+    {ref = "test-py311 ${paths}"},
+    {ref = "test-py312 ${paths}"},
+    {ref = "test-py313 ${paths}"},
+    {ref = "test-py314 ${paths}"},
 ]
 
-[tool.poe.tasks.test-py]
-cmd = """
-uv run \
-  --group=test \
-  --isolated \
-  --no-dev \
-  --python=${python} \
-  pytest --no-summary
-"""
-help = "Run all tests on a specific Python version"
-
-[[tool.poe.tasks.test-py.args]]
-default = "3.13"
-help = "Selected Python version"
-name = "python"
+[[tool.poe.tasks.test-all.args]]
+default = ""
+multiple = true
+name = "paths"
 positional = true
 
-[tool.pytest.ini_options]
+[tool.poe.tasks.test-py310]
+env = {UV_PYTHON = "3.10"}
+ref = "test"
+
+[tool.poe.tasks.test-py311]
+env = {UV_PYTHON = "3.11"}
+ref = "test"
+
+[tool.poe.tasks.test-py312]
+env = {UV_PYTHON = "3.12"}
+ref = "test"
+
+[tool.poe.tasks.test-py313]
+env = {UV_PYTHON = "3.13"}
+ref = "test"
+
+[tool.poe.tasks.test-py314]
+env = {UV_PYTHON = "3.14"}
+ref = "test"
+
+[tool.pytest]
 addopts = [
    "--color=yes",
    "--doctest-continue-on-failure",
@@ -259,6 +269,7 @@ filterwarnings = [
    "ignore: Importing ErrorTree directly from the jsonschema package is deprecated.*",
    "ignore: The `hash` argument is deprecated in favor of `unsafe_hash` and will be removed in or after August 2025.:DeprecationWarning",
 ]
+minversion = "9.0"
 testpaths = [
    "src",
    "tests",
 ]
@@ -316,7 +327,6 @@ task-tags = ["cspell"]
 ban-relative-imports = "all"
 
 [tool.ruff.lint.isort]
-known-first-party = ["compwa_policy"]
 split-on-trailing-comma = false
 
 [tool.ruff.lint.per-file-ignores]
diff --git a/src/compwa_policy/.github/workflows/cd.yml b/src/compwa_policy/.github/workflows/cd.yml
index 4e32c200..d1243746 100644
--- a/src/compwa_policy/.github/workflows/cd.yml
+++ b/src/compwa_policy/.github/workflows/cd.yml
@@ -9,9 +9,9 @@
 jobs:
   milestone:
     if: startsWith(github.ref, 'refs/tags')
-    uses: ComPWA/actions/.github/workflows/close-milestone.yml@v3
+    uses: ComPWA/actions/.github/workflows/close-milestone.yml@v4
   package-name:
-    uses: ComPWA/actions/.github/workflows/get-pypi-name.yml@v3
+    uses: ComPWA/actions/.github/workflows/get-pypi-name.yml@v4
   pypi:
     environment:
       name: PyPI
@@ -24,9 +24,9 @@ jobs:
       id-token: write
     runs-on: ubuntu-24.04
     steps:
-      - uses: ComPWA/actions/build-pypi-distribution@v3
+      - uses: ComPWA/actions/build-pypi-distribution@v4
       - uses: pypa/gh-action-pypi-publish@release/v1
   push:
     if: startsWith(github.ref, 'refs/tags') && !github.event.release.prerelease
     secrets: inherit
-    uses: ComPWA/actions/.github/workflows/push-to-version-branches.yml@v3
+    uses: ComPWA/actions/.github/workflows/push-to-version-branches.yml@v4
diff --git a/src/compwa_policy/.github/workflows/ci.yml b/src/compwa_policy/.github/workflows/ci.yml
index fb28edd7..5c6659e9 100644
--- a/src/compwa_policy/.github/workflows/ci.yml
+++ b/src/compwa_policy/.github/workflows/ci.yml
@@ -20,26 +20,16 @@ on:
      - epic/*
      - "[0-9]+.[0-9]+.x"
  workflow_dispatch:
-    inputs:
-      specific-pip-packages:
-        description: Run CI with specific pip packages
-        required: false
-        type: string
 
 jobs:
  doc:
-    uses: ComPWA/actions/.github/workflows/ci-docs.yml@v3.0
+    uses: ComPWA/actions/.github/workflows/ci-docs.yml@v4.0
    permissions:
      pages: write
      id-token: write
-    with:
-      specific-pip-packages: ${{ inputs.specific-pip-packages }}
-  pytest:
-    uses: ComPWA/actions/.github/workflows/pytest.yml@v3.0
-    with:
-      specific-pip-packages: ${{ inputs.specific-pip-packages }}
  style:
-    if: inputs.specific-pip-packages == ''
    secrets:
      token: ${{ secrets.PAT }}
-    uses: ComPWA/actions/.github/workflows/pre-commit.yml@v3.0
+    uses: ComPWA/actions/.github/workflows/pre-commit.yml@v4.0
+  test:
+    uses: ComPWA/actions/.github/workflows/test.yml@v4.0
diff --git a/src/compwa_policy/.github/workflows/lock.yml b/src/compwa_policy/.github/workflows/lock.yml
index 3d7cb8b5..51ac323d 100644
--- a/src/compwa_policy/.github/workflows/lock.yml
+++ b/src/compwa_policy/.github/workflows/lock.yml
@@ -22,6 +22,6 @@ on:
 
 jobs:
  lock:
-    uses: ComPWA/actions/.github/workflows/lock.yml@v3
+    uses: ComPWA/actions/.github/workflows/lock.yml@v4
    secrets:
      token: ${{ secrets.PAT }}
diff --git a/src/compwa_policy/.template/.cspell.json b/src/compwa_policy/.template/.cspell.json
index 34f7fb34..9efd2fd9 100644
--- a/src/compwa_policy/.template/.cspell.json
+++ b/src/compwa_policy/.template/.cspell.json
@@ -35,6 +35,7 @@
    ".editorconfig",
    ".envrc",
    ".gitattributes",
+    ".github/dependabot.yml",
    ".gitignore",
    ".gitpod.*",
    ".htaccess",
@@ -63,7 +64,6 @@
    "update-www"
  ],
  "ignoreWords": [
-    "MAINT",
    "PyPI",
    "argparse",
    "autonumbering",
diff --git a/src/compwa_policy/check_dev_files/__init__.py b/src/compwa_policy/check_dev_files/__init__.py
index 7fbb3a1b..6b677ffc 100644
--- a/src/compwa_policy/check_dev_files/__init__.py
+++ b/src/compwa_policy/check_dev_files/__init__.py
@@ -2,11 +2,12 @@
 
 from __future__ import annotations
 
+import argparse
 import os
-import re
 import sys
-from argparse import ArgumentParser, Namespace
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
+
+from attrs import frozen
 
 from compwa_policy.check_dev_files import (
     binder,
@@ -37,15 +38,16 @@
     ruff,
     toml,
     ty,
-    update_lock,
+    upgrade_lock,
     uv,
     vscode,
 )
+from compwa_policy.check_dev_files._characterization import has_python_code
 from compwa_policy.check_dev_files.deprecated import remove_deprecated_tools
 from compwa_policy.config import DEFAULT_DEV_PYTHON_VERSION, PythonVersion
 from compwa_policy.utilities import CONFIG_PATH
 from compwa_policy.utilities.executor import Executor
-from compwa_policy.utilities.match import git_ls_files, matches_patterns
+from compwa_policy.utilities.match import is_committed
 from compwa_policy.utilities.precommit import ModifiablePrecommit
 from compwa_policy.utilities.pyproject import Pyproject
 
@@ -56,30 +58,18 @@ def main(argv: Sequence[str] | None = None) -> int:  # noqa: C901, PLR0915
-    parser = _create_argparse()
-    args = parser.parse_args(argv)
+    args = _parse_arguments(argv)
     doc_apt_packages = _to_list(args.doc_apt_packages)
     environment_variables = _get_environment_variables(args.environment_variables)
-    is_python_repo = not args.no_python
-    macos_python_version = (
-        None if args.macos_python_version == "disable" else args.macos_python_version
-    )
-    repo_name, repo_title = _determine_repo_name_and_title(args)
-    has_notebooks = 
any( - matches_patterns(file, ["**/*.ipynb"]) for file in git_ls_files(untracked=True) - ) - use_gitpod = args.gitpod - dev_python_version = __get_python_version(args.dev_python_version) - excluded_python_versions = set(_to_list(args.excluded_python_versions)) - package_manager: PackageManagerChoice = args.package_manager - type_checkers: set[ty.TypeChecker] = set(args.type_checker or []) + is_python_repo = has_python_code() if args.python is None else args.python + has_notebooks = is_committed("**/*.ipynb") if CONFIG_PATH.pyproject.exists(): supported_versions = Pyproject.load().get_supported_python_versions() - if supported_versions and dev_python_version not in supported_versions: + if supported_versions and args.dev_python_version not in supported_versions: print( # noqa: T201 - f"The specified development Python version {dev_python_version} is not " - "listed in the supported Python versions of pyproject.toml: " - f"{', '.join(sorted(supported_versions))}" + f"The specified development Python version {args.dev_python_version} is" + " not listed in the supported Python versions of pyproject.toml:" + f" {', '.join(sorted(supported_versions))}" ) return 1 with ( @@ -88,8 +78,8 @@ def main(argv: Sequence[str] | None = None) -> int: # noqa: C901, PLR0915 ): do(citation.main, precommit_config) do(commitlint.main) - do(conda.main, dev_python_version, package_manager) - do(dependabot.main, args.dependabot) + do(conda.main, args.dev_python_version, args.package_manager) + do(dependabot.main, args.upgrade_frequency) do(editorconfig.main, precommit_config) if not args.allow_labels: do(github_labels.main) @@ -102,19 +92,23 @@ def main(argv: Sequence[str] | None = None) -> int: # noqa: C901, PLR0915 environment_variables=environment_variables, github_pages=args.github_pages, keep_pr_linting=args.keep_pr_linting, - macos_python_version=macos_python_version, + macos_python_version=args.macos_python_version, no_cd=args.no_cd, no_milestones=args.no_milestones, no_pypi=args.no_pypi, no_version_branches=args.no_version_branches, - python_version=dev_python_version, + python_version=args.dev_python_version, single_threaded=args.pytest_single_threaded, skip_tests=_to_list(args.ci_skipped_tests), - test_extras=_to_list(args.ci_test_extras), ) if has_notebooks: if not args.no_binder: - do(binder.main, package_manager, dev_python_version, doc_apt_packages) + do( + binder.main, + args.package_manager, + args.dev_python_version, + doc_apt_packages, + ) do(jupyter.main, args.no_ruff) do( nbstripout.main, @@ -124,13 +118,13 @@ def main(argv: Sequence[str] | None = None) -> int: # noqa: C901, PLR0915 ) do( pixi.main, - package_manager, + args.package_manager, is_python_repo, - dev_python_version, + args.dev_python_version, ) - do(direnv.main, package_manager, environment_variables) + do(direnv.main, args.package_manager, environment_variables) do(toml.main, precommit_config) # has to run before pre-commit - do(poe.main, has_notebooks) + do(poe.main, has_notebooks, args.package_manager) do(prettier.main, precommit_config) if is_python_repo: if args.no_ruff: @@ -139,49 +133,69 @@ def main(argv: Sequence[str] | None = None) -> int: # noqa: C901, PLR0915 do( release_drafter.main, args.no_cd, - repo_name, - repo_title, + args.repo_name, + args.repo_title, args.repo_organization, ) - do(pyproject.main, excluded_python_versions) - do(mypy.main, "mypy" in type_checkers, precommit_config) - do(pyright.main, "pyright" in type_checkers, precommit_config) - do(ty.main, type_checkers, args.keep_local_precommit, 
precommit_config) + do(pyproject.main, args.excluded_python_versions) + do(mypy.main, "mypy" in args.type_checker, precommit_config) + do(pyright.main, "pyright" in args.type_checker, precommit_config) + do(ty.main, args.type_checker, args.keep_local_precommit, precommit_config) do(pytest.main, args.pytest_single_threaded) do(pyupgrade.main, precommit_config, args.no_ruff) if not args.no_ruff: do(ruff.main, precommit_config, has_notebooks, args.imports_on_top) - if args.update_lock_files != "no": + if args.upgrade_frequency != "no": do( - update_lock.main, + upgrade_lock.main, precommit_config, - frequency=args.update_lock_files, + frequency=args.upgrade_frequency, ) - do(readthedocs.main, package_manager, dev_python_version) + do(readthedocs.main, args.package_manager, args.dev_python_version) do(remove_deprecated_tools, precommit_config, args.keep_issue_templates) - do(vscode.main, has_notebooks, is_python_repo, package_manager) - do(gitpod.main, use_gitpod, dev_python_version) + do(vscode.main, has_notebooks, is_python_repo, args.package_manager) + do(gitpod.main, args.gitpod, args.dev_python_version) do(precommit.main, precommit_config, has_notebooks) do( uv.main, - dev_python_version, - package_manager, precommit_config, + args.dev_python_version, + args.keep_contributing_md, + args.package_manager, args.repo_organization, - repo_name, + args.repo_name, ) do(cspell.main, precommit_config, args.no_cspell_update) return 1 if do.error_messages else 0 -def _create_argparse() -> ArgumentParser: - parser = ArgumentParser(__doc__) +def _parse_arguments(argv: Sequence[str] | None = None) -> Arguments: + parser = _create_argparse() + args = parser.parse_args(argv) + args.excluded_python_versions = set(_to_list(args.excluded_python_versions)) + args.macos_python_version = ( + None if args.macos_python_version == "disable" else args.macos_python_version + ) + args.repo_name = args.repo_name or os.path.basename(os.getcwd()) + args.repo_title = args.repo_title or args.repo_name + args.type_checker = set(args.type_checker or []) + return Arguments(**args.__dict__) + + +def _create_argparse() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(__doc__) parser.add_argument( "--allow-deprecated-workflows", action="store_true", default=False, help="Allow deprecated CI workflows, such as ci-docs.yml.", ) + parser.add_argument( + "--allow-labels", + action="store_true", + default=False, + help="Do not perform the check on labels.toml", + ) parser.add_argument( "--allowed-cell-metadata", default="", @@ -195,16 +209,10 @@ def _create_argparse() -> ArgumentParser: type=str, ) parser.add_argument( - "--ci-test-extras", - default="", - help="Comma-separated list of extras that are required for running tests on CI", - type=str, - ) - parser.add_argument( - "--dependabot", - choices=dependabot.DependabotOption.__args__, - default=None, - help="Leave dependabot.yml untouched ('keep') or sync with ComPWA/policy", + "--dev-python-version", + choices=PythonVersion.__args__, + default=DEFAULT_DEV_PYTHON_VERSION, + help="Specify the Python version for your developer environment", ) parser.add_argument( "--doc-apt-packages", @@ -236,6 +244,12 @@ def _create_argparse() -> ArgumentParser: default=False, help="Create a GitPod config file", ) + parser.add_argument( + "--imports-on-top", + action="store_true", + default=False, + help="Sort notebook imports on the top", + ) parser.add_argument( "--keep-issue-templates", help="Do not remove the .github/ISSUE_TEMPLATE directory", @@ -243,16 +257,28 @@ def 
_create_argparse() -> ArgumentParser: default=False, ) parser.add_argument( - "--keep-pr-linting", - help="Do not overwrite the PR linting workflow", + "--keep-local-precommit", action="store_true", default=False, + help="Do not remove local pre-commit hooks", ) parser.add_argument( - "--imports-on-top", + "--keep-contributing-md", action="store_true", default=False, - help="Sort notebook imports on the top", + help="Do not update or remove the CONTRIBUTING.md file", + ) + parser.add_argument( + "--keep-pr-linting", + help="Do not overwrite the PR linting workflow", + action="store_true", + default=False, + ) + parser.add_argument( + "--macos-python-version", + choices=[*sorted(PythonVersion.__args__), "disable"], + default="3.10", + help="Run the test job in MacOS on a specific Python version. Use 'disable' to not run the tests on MacOS.", ) parser.add_argument( "--no-binder", @@ -293,42 +319,18 @@ def _create_argparse() -> ArgumentParser: default=False, help="This repository does not use milestones and therefore no close workflow.", ) - parser.add_argument( - "--no-python", - action="store_true", - default=False, - help="Skip check that concern config files for Python projects.", - ) - parser.add_argument( - "--allow-labels", - action="store_true", - default=False, - help="Do not perform the check on labels.toml", - ) - parser.add_argument( - "--dev-python-version", - default=DEFAULT_DEV_PYTHON_VERSION, - help="Specify the Python version for your developer environment", - type=str, - ) - parser.add_argument( - "--keep-local-precommit", - action="store_true", - default=False, - help="Do not remove local pre-commit hooks", - ) - parser.add_argument( - "--macos-python-version", - choices=[*sorted(PythonVersion.__args__), "disable"], - default="3.10", - help="Run the test job in MacOS on a specific Python version. Use 'disable' to not run the tests on MacOS.", - ) parser.add_argument( "--no-pypi", action="store_true", default=False, help="Do not publish package to PyPI", ) + parser.add_argument( + "--python", + action=argparse.BooleanOptionalAction, + default=None, + help="Specify whether this repository contains Python code (default: automatic detection)", + ) parser.add_argument( "--no-ruff", action="store_true", @@ -348,22 +350,6 @@ def _create_argparse() -> ArgumentParser: help="Specify which package manager to use for the project", type=str, ) - parser.add_argument( - "--type-checker", - action="append", - choices=ty.TypeChecker.__args__, - help="Specify which type checker to use for the project", - ) - parser.add_argument( - "--update-lock-files", - choices=update_lock.Frequency.__args__, - default="outsource", - help=( - "Add a workflow to upgrade lock files, like uv.lock, .pre-commit-config.yml, " - "and pip .constraints/ files. The argument is the frequency of the cron job" - ), - type=str, - ) parser.add_argument( "--pytest-single-threaded", action="store_true", @@ -394,15 +380,58 @@ def _create_argparse() -> ArgumentParser: ), type=str, ) + parser.add_argument( + "--type-checker", + action="append", + choices=ty.TypeChecker.__args__, + help="Specify which type checker to use for the project", + ) + parser.add_argument( + "--upgrade-frequency", + choices=upgrade_lock.Frequency.__args__, + default="quarterly", + help=( + "Add a workflow to upgrade lock files, like uv.lock, .pre-commit-config.yml, " + "and pip .constraints/ files. 
The argument is the frequency of the cron job" + ), + ) return parser -def _determine_repo_name_and_title(args: Namespace) -> tuple[str, str]: - repo_name = args.repo_name - if not repo_name: - repo_name = os.path.basename(os.getcwd()) - repo_title = args.repo_title or repo_name - return repo_name, repo_title +@frozen +class Arguments: + allow_deprecated_workflows: bool + allow_labels: bool + allowed_cell_metadata: str + ci_skipped_tests: str + dev_python_version: PythonVersion + doc_apt_packages: str + environment_variables: str + excluded_python_versions: set[PythonVersion] + github_pages: bool + gitpod: bool + imports_on_top: bool + keep_contributing_md: bool + keep_issue_templates: bool + keep_local_precommit: bool + keep_pr_linting: bool + macos_python_version: PythonVersion | None + no_binder: bool + no_cd: bool + no_cspell_update: bool + no_github_actions: bool + no_milestones: bool + no_pypi: bool + no_ruff: bool + no_version_branches: bool + package_manager: PackageManagerChoice + pytest_single_threaded: bool + python: bool | None + repo_name: str + repo_organization: str + repo_title: str + type_checker: set[ty.TypeChecker] + upgrade_frequency: upgrade_lock.Frequency def _get_environment_variables(arg: str) -> dict[str, str]: @@ -458,16 +487,5 @@ def _to_list(arg: str) -> list[str]: return sorted(space_separated.split(" ")) -def __get_python_version(arg: Any) -> PythonVersion: - if not isinstance(arg, str): - msg = f"--dev-python-version must be a string, not {type(arg).__name__}" - raise TypeError(msg) - arg = arg.strip() - if not re.match(r"^3\.\d+$", arg): - msg = f"Invalid Python version: {arg}" - raise ValueError(msg) - return arg - - if __name__ == "__main__": sys.exit(main()) diff --git a/src/compwa_policy/check_dev_files/_characterization.py b/src/compwa_policy/check_dev_files/_characterization.py new file mode 100644 index 00000000..079c8c92 --- /dev/null +++ b/src/compwa_policy/check_dev_files/_characterization.py @@ -0,0 +1,19 @@ +"""Characterization of repository.""" + +from functools import cache + +from compwa_policy.utilities.match import is_committed + + +@cache +def has_documentation() -> bool: + if is_committed("docs/**"): + return True + if is_committed("_quarto.yml", "**/_quarto.yml", ":!:tests"): + return True + return is_committed("conf.py", "**/conf.py", ":!:tests") + + +@cache +def has_python_code() -> bool: + return is_committed("**/*.ipynb", "**/*.py", "**/*.pyi") diff --git a/src/compwa_policy/check_dev_files/dependabot.py b/src/compwa_policy/check_dev_files/dependabot.py index 844a4d74..98f4327e 100644 --- a/src/compwa_policy/check_dev_files/dependabot.py +++ b/src/compwa_policy/check_dev_files/dependabot.py @@ -2,51 +2,66 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Literal +from copy import deepcopy +from functools import cache +from typing import TYPE_CHECKING, Any, cast + +import yaml from compwa_policy.errors import PrecommitError from compwa_policy.utilities import COMPWA_POLICY_DIR, CONFIG_PATH +from compwa_policy.utilities.match import is_committed +from compwa_policy.utilities.yaml import create_prettier_round_trip_yaml if TYPE_CHECKING: - from pathlib import Path + from compwa_policy.check_dev_files.upgrade_lock import Frequency + -DependabotOption = Literal["keep", "update"] -"""Allowed options for the :code:`--dependabot` argument.""" +def main(frequency: Frequency) -> None: + def dump_dependabot_config() -> None: + dependabot_path.parent.mkdir(exist_ok=True) + rt_yaml.dump(expected, dependabot_path) + msg = 
f"Updated {dependabot_path}" + raise PrecommitError(msg) + def append_ecosystem(ecosystem_name: str) -> None: + new_ecosystem = deepcopy(github_actions_ecosystem) # avoid YAML anchors + new_ecosystem["package-ecosystem"] = ecosystem_name + package_ecosystems.append(new_ecosystem) -def main(allow_dependabot: DependabotOption | None) -> None: dependabot_path = CONFIG_PATH.github_workflow_dir.parent / "dependabot.yml" - if allow_dependabot is None: - _remove_dependabot(dependabot_path) - elif allow_dependabot == "update": - _update_dependabot(dependabot_path) + template_path = COMPWA_POLICY_DIR / dependabot_path + rt_yaml = create_prettier_round_trip_yaml() + expected = rt_yaml.load(template_path) + if frequency is not None: + expected["multi-ecosystem-groups"]["lock"]["schedule"]["interval"] = frequency + package_ecosystems = cast("list[dict[str, Any]]", expected["updates"]) + github_actions_ecosystem = package_ecosystems[0] + if not is_committed(f"{CONFIG_PATH.github_workflow_dir / '*.yml'}"): + package_ecosystems.pop(0) + if is_committed("**/Manifest.toml"): + append_ecosystem("julia") + if is_committed("uv.lock"): + append_ecosystem("uv") -def _remove_dependabot(dependabot_path: Path) -> None: - if not dependabot_path.exists(): + if not package_ecosystems: + dependabot_path.unlink(missing_ok=True) + msg = f"Removed {dependabot_path}" + raise PrecommitError(msg) return - dependabot_path.unlink() - msg = ( - f"Removed {dependabot_path}, because it is GitHub workflows have been" - " outsourced to https://github.com/ComPWA/actions" - ) - raise PrecommitError(msg) + if not dependabot_path.exists(): + dump_dependabot_config() + existing = rt_yaml.load(dependabot_path) + if existing != expected: + dump_dependabot_config() -def _update_dependabot(dependabot_path: Path) -> None: - template_path = COMPWA_POLICY_DIR / dependabot_path - with open(template_path) as f: - template = f.read() +@cache +def get_dependabot_ecosystems() -> set[str]: + dependabot_path = CONFIG_PATH.github_workflow_dir.parent / "dependabot.yml" if not dependabot_path.exists(): - __dump_dependabot_template(template, dependabot_path) - with open(dependabot_path) as f: - dependabot = f.read() - if dependabot != template: - __dump_dependabot_template(template, dependabot_path) - - -def __dump_dependabot_template(content: str, path: Path) -> None: - with open(path, "w") as f: - f.write(content) - msg = f"Updated {path}" - raise PrecommitError(msg) + return set() + with dependabot_path.open("r") as stream: + config = yaml.load(stream, Loader=yaml.SafeLoader) + return {entry["package-ecosystem"] for entry in config["updates"]} diff --git a/src/compwa_policy/check_dev_files/editorconfig.py b/src/compwa_policy/check_dev_files/editorconfig.py index fa757b6b..25c446e3 100644 --- a/src/compwa_policy/check_dev_files/editorconfig.py +++ b/src/compwa_policy/check_dev_files/editorconfig.py @@ -13,7 +13,7 @@ from ruamel.yaml.scalarstring import FoldedScalarString from compwa_policy.utilities import CONFIG_PATH -from compwa_policy.utilities.match import filter_files +from compwa_policy.utilities.match import git_ls_files from compwa_policy.utilities.precommit.struct import Hook, Repo if TYPE_CHECKING: @@ -31,7 +31,7 @@ def _update_precommit_config(precommit: ModifiablePrecommit) -> None: name="editorconfig", alias="ec", ) - if filter_files(["**/*.py"]): + if git_ls_files("**/*.py"): msg = R""" (?x)^( .*\.py diff --git a/src/compwa_policy/check_dev_files/github_labels.py b/src/compwa_policy/check_dev_files/github_labels.py index 
d15a6bbe..c8b384fb 100644 --- a/src/compwa_policy/check_dev_files/github_labels.py +++ b/src/compwa_policy/check_dev_files/github_labels.py @@ -7,11 +7,11 @@ from __future__ import annotations import os -from functools import lru_cache +from functools import cache from pathlib import Path from compwa_policy.errors import PrecommitError -from compwa_policy.utilities.match import filter_files +from compwa_policy.utilities.match import git_ls_files __LABELS_CONFIG_FILE = "labels.toml" @@ -50,13 +50,12 @@ def _check_has_labels_requirement(path: Path) -> bool: return False -@lru_cache(maxsize=1) +@cache def _get_requirement_files() -> list[Path]: - patterns = [ + filenames = git_ls_files( "**/requirements*.in", "**/requirements*.txt", - ] - filenames = filter_files(patterns) + ) return [Path(file) for file in filenames] diff --git a/src/compwa_policy/check_dev_files/github_workflows.py b/src/compwa_policy/check_dev_files/github_workflows.py index b72605ba..002a8b1a 100644 --- a/src/compwa_policy/check_dev_files/github_workflows.py +++ b/src/compwa_policy/check_dev_files/github_workflows.py @@ -5,10 +5,12 @@ import os import re import shutil +from pathlib import Path from typing import TYPE_CHECKING, cast from ruamel.yaml.scalarstring import DoubleQuotedScalarString +from compwa_policy.check_dev_files._characterization import has_documentation from compwa_policy.config import DEFAULT_DEV_PYTHON_VERSION from compwa_policy.errors import PrecommitError from compwa_policy.utilities import ( @@ -19,16 +21,10 @@ write, ) from compwa_policy.utilities.executor import Executor -from compwa_policy.utilities.pyproject import ( - Pyproject, - PythonVersion, - has_pyproject_package_name, -) +from compwa_policy.utilities.pyproject import PythonVersion, has_pyproject_package_name from compwa_policy.utilities.yaml import create_prettier_round_trip_yaml if TYPE_CHECKING: - from pathlib import Path - from ruamel.yaml.comments import CommentedMap from ruamel.yaml.main import YAML @@ -51,7 +47,6 @@ def main( python_version: PythonVersion, single_threaded: bool, skip_tests: list[str], - test_extras: list[str], ) -> None: with Executor() as do: if no_cd: @@ -69,7 +64,6 @@ def main( python_version, single_threaded, skip_tests, - test_extras, ) if not keep_pr_linting: do(_update_pr_linting) @@ -123,7 +117,7 @@ def _update_pr_linting() -> None: output_path.parent.mkdir(exist_ok=True) if not output_path.exists() or hash_file(input_path) != hash_file(output_path): shutil.copyfile(input_path, output_path) - msg = f'Updated "{output_path}" workflow' + msg = f"Updated {output_path} workflow" raise PrecommitError(msg) @@ -137,7 +131,6 @@ def _update_ci_workflow( # noqa: PLR0917 python_version: PythonVersion, single_threaded: bool, skip_tests: list[str], - test_extras: list[str], ) -> None: def update() -> None: yaml, expected_data = _get_ci_workflow( @@ -150,7 +143,6 @@ def update() -> None: python_version, single_threaded, skip_tests, - test_extras, ) workflow_path = CONFIG_PATH.github_workflow_dir / "ci.yml" if not expected_data.get("jobs"): @@ -186,15 +178,12 @@ def _get_ci_workflow( # noqa: PLR0917 python_version: PythonVersion, single_threaded: bool, skip_tests: list[str], - test_extras: list[str], ) -> tuple[YAML, dict]: yaml = create_prettier_round_trip_yaml() config = yaml.load(path) __update_env_section(config, environment_variables) __update_doc_section(config, doc_apt_packages, python_version, github_pages) - __update_pytest_section( - config, macos_python_version, single_threaded, skip_tests, test_extras - ) + 
__update_pytest_section(config, macos_python_version, single_threaded, skip_tests) __update_style_section(config, python_version, precommit) return yaml, config @@ -217,17 +206,19 @@ def __update_doc_section( python_version: PythonVersion, github_pages: bool, ) -> None: - if not os.path.exists("docs/"): - del config["jobs"]["doc"] - else: - with_section = config["jobs"]["doc"]["with"] + if has_documentation(): + with_section = {} if python_version != DEFAULT_DEV_PYTHON_VERSION: with_section["python-version"] = DoubleQuotedScalarString(python_version) if apt_packages: with_section["apt-packages"] = " ".join(apt_packages) if not CONFIG_PATH.readthedocs.exists() or github_pages: with_section["gh-pages"] = True + if with_section: + config["jobs"]["doc"]["with"] = with_section __update_with_section(config, job_name="doc") + else: + del config["jobs"]["doc"] def __update_style_section( @@ -251,21 +242,18 @@ def __update_pytest_section( macos_python_version: PythonVersion | None, single_threaded: bool, skip_tests: list[str], - test_extras: list[str], ) -> None: test_dir = "tests" if not os.path.exists(test_dir): - del config["jobs"]["pytest"] + del config["jobs"]["test"] else: - with_section = config["jobs"]["pytest"]["with"] - if test_extras: - with_section["additional-extras"] = " ".join(test_extras) + with_section = {} if CONFIG_PATH.codecov.exists(): - with_section["coverage-target"] = __get_package_name() + with_section["coverage-python-version"] = __get_coverage_python_version() secrets = { "CODECOV_TOKEN": "${{ secrets.CODECOV_TOKEN }}", } - config["jobs"]["pytest"]["secrets"] = secrets + config["jobs"]["test"]["secrets"] = secrets if macos_python_version is not None: with_section["macos-python-version"] = DoubleQuotedScalarString( macos_python_version @@ -277,33 +265,25 @@ def __update_pytest_section( output_path = f"{test_dir}/output/" if os.path.exists(output_path): with_section["test-output-path"] = output_path - __update_with_section(config, job_name="pytest") + if with_section: + config["jobs"]["test"]["with"] = with_section + __update_with_section(config, job_name="test") def __update_with_section(config: dict, job_name: str) -> None: - with_section = config["jobs"][job_name]["with"] + with_section = config["jobs"][job_name].get("with") if with_section: sorted_section = {k: with_section[k] for k in sorted(with_section)} config["jobs"][job_name]["with"] = sorted_section - else: + elif with_section is not None: del with_section -def __get_package_name() -> str: - pypi_name = Pyproject.load().get_package_name(raise_on_missing=True) - package_name = pypi_name.replace("-", "_").lower() - if os.path.exists(f"src/{package_name}/"): - return package_name - src_dirs = os.listdir("src/") - candidate_dirs = [ - s - for s in src_dirs - if s.startswith(pypi_name[0].lower()) - if not s.endswith(".egg-info") - ] - if candidate_dirs: - return min(candidate_dirs) - return min(src_dirs) +def __get_coverage_python_version() -> PythonVersion: + python_version_file = Path(".python-version") + if python_version_file.exists(): + return python_version_file.read_text().strip() # ty:ignore[invalid-return-type] + return DEFAULT_DEV_PYTHON_VERSION def _copy_workflow_file(filename: str) -> None: @@ -318,14 +298,14 @@ def _copy_workflow_file(filename: str) -> None: workflow_path = f"{CONFIG_PATH.github_workflow_dir}/{filename}" if not os.path.exists(workflow_path): write(expected_content, target=workflow_path) - msg = f'Created "{workflow_path}" workflow' + msg = f"Created {workflow_path} workflow" raise 
PrecommitError(msg) with open(workflow_path) as stream: existing_content = stream.read() if existing_content != expected_content: write(expected_content, target=workflow_path) - msg = f'Updated "{workflow_path}" workflow' + msg = f"Updated {workflow_path} workflow" raise PrecommitError(msg) @@ -362,7 +342,7 @@ def remove_workflow(filename: str) -> None: path = CONFIG_PATH.github_workflow_dir / filename if path.exists(): path.unlink() - msg = f'Removed deprecated "{filename}" workflow' + msg = f"Removed deprecated {filename} workflow" raise PrecommitError(msg) @@ -370,5 +350,5 @@ def update_workflow(yaml: YAML, config: dict, path: Path) -> None: path.parent.mkdir(exist_ok=True, parents=True) yaml.dump(config, path) verb = "Updated" if path.exists() else "Created" - msg = f'{verb} "{path}" workflow' + msg = f"{verb} {path} workflow" raise PrecommitError(msg) diff --git a/src/compwa_policy/check_dev_files/jupyter.py b/src/compwa_policy/check_dev_files/jupyter.py index ef68d572..308b9ee4 100644 --- a/src/compwa_policy/check_dev_files/jupyter.py +++ b/src/compwa_policy/check_dev_files/jupyter.py @@ -1,11 +1,8 @@ """Update the developer setup when using Jupyter notebooks.""" -from compwa_policy.utilities import vscode +from compwa_policy.utilities import CONFIG_PATH, vscode from compwa_policy.utilities.executor import Executor -from compwa_policy.utilities.pyproject import ( - ModifiablePyproject, - has_pyproject_package_name, -) +from compwa_policy.utilities.pyproject import ModifiablePyproject def main(no_ruff: bool) -> None: @@ -22,7 +19,7 @@ def main(no_ruff: bool) -> None: def _update_dev_requirements(no_ruff: bool) -> None: - if not has_pyproject_package_name(): + if not CONFIG_PATH.pyproject.exists(): return with ModifiablePyproject.load() as pyproject: supported_python_versions = pyproject.get_supported_python_versions() @@ -32,19 +29,26 @@ def _update_dev_requirements(no_ruff: bool) -> None: "jupyterlab", "jupyterlab-git", "jupyterlab-lsp", - "jupyterlab-myst", "jupyterlab-quickopen", # cspell:ignore quickopen - "python-lsp-server[rope]", + "python-lsp-server", } + # cspell:ignore executablebookproject + recommended_vscode_extensions = vscode.get_recommended_extensions() + if "executablebookproject.myst-highlight" in recommended_vscode_extensions: + packages.add("jupyterlab-myst") + else: + pyproject.remove_dependency("jupyterlab-myst") + if "quarto.quarto" in recommended_vscode_extensions: + packages.add("jupyterlab-quarto") + else: + pyproject.remove_dependency("jupyterlab-quarto") + pyproject.remove_dependency("python-lsp-server[rope]") if not no_ruff: pyproject.remove_dependency( "black", ignored_sections=["doc", "notebooks", "test"] ) pyproject.remove_dependency("isort") - ruff_packages = { - "jupyterlab-code-formatter", - "python-lsp-ruff", - } - packages.update(ruff_packages) + pyproject.remove_dependency("jupyterlab-code-formatter") + packages.add("jupyter-ruff") for package in sorted(packages): pyproject.add_dependency(package, dependency_group=["jupyter", "dev"]) diff --git a/src/compwa_policy/check_dev_files/pixi/_helpers.py b/src/compwa_policy/check_dev_files/pixi/_helpers.py index d52f34a8..125ed8db 100644 --- a/src/compwa_policy/check_dev_files/pixi/_helpers.py +++ b/src/compwa_policy/check_dev_files/pixi/_helpers.py @@ -1,12 +1,12 @@ from __future__ import annotations from compwa_policy.utilities import CONFIG_PATH -from compwa_policy.utilities.match import filter_files +from compwa_policy.utilities.match import git_ls_files from compwa_policy.utilities.pyproject import 
Pyproject def has_pixi_config(pyproject: Pyproject | None = None) -> bool: - if filter_files(["pixi.lock", "pixi.toml"]): + if git_ls_files("pixi.lock", "pixi.toml"): return True if pyproject is not None: return pyproject.has_table("tool.pixi") diff --git a/src/compwa_policy/check_dev_files/pixi/_update.py b/src/compwa_policy/check_dev_files/pixi/_update.py index b93a65f7..6591f09f 100644 --- a/src/compwa_policy/check_dev_files/pixi/_update.py +++ b/src/compwa_policy/check_dev_files/pixi/_update.py @@ -3,7 +3,6 @@ from typing import TYPE_CHECKING, Any import yaml -from tomlkit import inline_table from compwa_policy.check_dev_files.pixi._helpers import has_pixi_config from compwa_policy.errors import PrecommitError @@ -16,11 +15,10 @@ ) from compwa_policy.utilities.pyproject.setters import split_dependency_definition from compwa_policy.utilities.readme import add_badge -from compwa_policy.utilities.toml import to_toml_array +from compwa_policy.utilities.toml import to_inline_table, to_toml_array if TYPE_CHECKING: from collections.abc import MutableMapping - from pathlib import Path from tomlkit.items import Table @@ -45,9 +43,8 @@ def update_pixi_configuration( add_badge, "[![Pixi Badge](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/prefix-dev/pixi/main/assets/badge/v0.json)](https://pixi.sh)", ) - if config_path == CONFIG_PATH.pixi_toml: - do(_rename_workspace_table, config) - do(_define_minimal_project, config, config_path) + do(_rename_workspace_table, config) + do(_define_minimal_project, config) do(_import_conda_dependencies, config) do(_import_conda_environment, config) if package_manager == "pixi+uv": @@ -97,17 +94,17 @@ def _rename_workspace_table(config: ModifiablePyproject) -> None: project = __get_table(config, "project") workspace = __get_table(config, "workspace", create=True) workspace.update(project) - del config._document["project"] # noqa: SLF001 + if config._source == CONFIG_PATH.pyproject: # noqa: SLF001 + del config._document["tool"]["pixi"]["project"] # noqa: SLF001 + else: + del config._document["project"] # noqa: SLF001 msg = 'Renamed "project" table to "workspace" in Pixi configuration' config.changelog.append(msg) -def _define_minimal_project(config: ModifiablePyproject, path: Path) -> None: +def _define_minimal_project(config: ModifiablePyproject) -> None: """Create a minimal Pixi project definition if it does not exist.""" - if path == CONFIG_PATH.pixi_toml: - table_name = "workspace" - else: - table_name = "project" + table_name = "workspace" settings = __get_table(config, table_name, create=True) minimal_settings: dict[str, Any] = dict( channels=["conda-forge"], @@ -197,8 +194,9 @@ def _clean_up_task_env(config: ModifiablePyproject) -> None: local_env = task_table.get("env", {}) if not local_env: continue - expected = inline_table() - expected.update({k: v for k, v in local_env.items() if v != global_env.get(k)}) + expected = to_inline_table({ + k: v for k, v in local_env.items() if v != global_env.get(k) + }) if local_env != expected: if expected: task_table["env"] = expected @@ -218,8 +216,7 @@ def __load_pixi_environment_variables(config: ModifiablePyproject) -> dict[str, def _install_package_editable(config: ModifiablePyproject) -> None: - editable = inline_table() - editable.update({ + editable = to_inline_table({ "path": ".", "editable": True, }) @@ -262,8 +259,9 @@ def _update_dev_environment(config: ModifiablePyproject) -> None: if not __has_table(config, "project.optional-dependencies"): return optional_dependencies = 
__get_table(config, "project.optional-dependencies") - expected = inline_table() - expected["features"] = to_toml_array(sorted(optional_dependencies)) + expected = to_inline_table({ + "features": to_toml_array(sorted(optional_dependencies)) + }) environments = __get_table(config, "environments", create=True) if environments.get("default") != expected: environments["default"] = expected diff --git a/src/compwa_policy/check_dev_files/poe.py b/src/compwa_policy/check_dev_files/poe.py index 7bfbe0f3..e1e1a838 100644 --- a/src/compwa_policy/check_dev_files/poe.py +++ b/src/compwa_policy/check_dev_files/poe.py @@ -2,15 +2,29 @@ from __future__ import annotations +import re +from collections.abc import Mapping, MutableMapping, Sequence from pathlib import Path +from typing import TYPE_CHECKING, Any, cast +from compwa_policy.check_dev_files._characterization import has_documentation from compwa_policy.errors import PrecommitError from compwa_policy.utilities import CONFIG_PATH, remove_lines from compwa_policy.utilities.executor import Executor -from compwa_policy.utilities.pyproject import ModifiablePyproject, Pyproject +from compwa_policy.utilities.pyproject import ( + ModifiablePyproject, + Pyproject, + has_dependency, +) +from compwa_policy.utilities.toml import to_inline_table, to_toml_array +if TYPE_CHECKING: + from tomlkit.items import Array, Table -def main(has_notebooks: bool) -> None: + from compwa_policy.check_dev_files.conda import PackageManagerChoice + + +def main(has_notebooks: bool, package_manager: PackageManagerChoice) -> None: if not CONFIG_PATH.pyproject.is_file(): return with Executor() as do, ModifiablePyproject.load() as pyproject: @@ -20,6 +34,18 @@ def main(has_notebooks: bool) -> None: pyproject.changelog.append(msg) if pyproject.has_table("tool.poe"): do(_check_expected_sections, pyproject, has_notebooks) + if package_manager == "uv": + do(_configure_uv_executor, pyproject) + if pyproject.has_table("tool.poe.tasks"): + do(_check_no_uv_run, pyproject) + do(_set_all_task, pyproject) + if has_dependency(pyproject, "jupyterlab"): + do(_set_jupyter_lab_task, pyproject) + if has_notebooks: + pyproject.remove_dependency("nbmake") # cspell:ignore nbmake + do(_set_nb_task, pyproject) + do(_set_test_all_task, pyproject) + do(_update_doclive, pyproject) do(remove_lines, CONFIG_PATH.gitignore, r"\.tox/?") pyproject.remove_dependency("poethepoet") pyproject.remove_dependency("tox") @@ -27,24 +53,20 @@ def main(has_notebooks: bool) -> None: def _check_expected_sections(pyproject: Pyproject, has_notebooks: bool) -> None: - # cspell:ignore doclive docnb docnblive testenv - table_name = "tool.poe" - if not pyproject.has_table(table_name): - return - poe_table = pyproject.get_table(table_name) + poe_table = pyproject.get_table("tool.poe") tasks = set(poe_table.get("tasks", set())) expected_tasks: set[str] = set() - if Path("docs").exists(): + if has_documentation(): expected_tasks |= { "doc", "doclive", } if has_notebooks: - expected_tasks |= { - "docnb", - "docnblive", - "nb", - } + expected_tasks.add("nb") + if has_dependency(pyproject, "myst-nb"): + expected_tasks.update({"docnb", "docnblive"}) + if Path("tests").exists(): + expected_tasks.add("test") missing_tasks = expected_tasks - tasks if missing_tasks: msg = ( @@ -52,3 +74,188 @@ def _check_expected_sections(pyproject: Pyproject, has_notebooks: bool) -> None: f" {', '.join(sorted(missing_tasks))}" ) raise PrecommitError(msg) + + +def _configure_uv_executor(pyproject: ModifiablePyproject) -> None: + poe_table = 
pyproject.get_table("tool.poe") + executor_table = poe_table.get("executor") + if executor_table is None or isinstance(executor_table, str): + if "executor" in poe_table: + del poe_table["executor"] + executor_table = {} + has_dev = "dev" in pyproject.get_table("dependency-groups", fallback=set()) + if any([ + __safe_update(executor_table, "isolated", True), + __safe_update(executor_table, "no-group", "dev") if has_dev else False, + __safe_update(executor_table, "type", "uv"), + ]): + poe_table["executor"] = executor_table + msg = f"Set Poe the Poet executor to uv in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def _check_no_uv_run(pyproject: Pyproject) -> None: + tasks = pyproject.get_table("tool.poe.tasks") + offending_tasks = [] + for name, task in tasks.items(): + if __has_uv_run(task.get("cmd", "")) and task.get("executor") != "simple": + offending_tasks.append(name) + continue + if offending_tasks: + msg = ( + "Poe the Poet tasks should not use 'uv run' when the executor is set to" + " 'uv'. Offending tasks: " + f"{', '.join(sorted(offending_tasks))}" + ) + raise PrecommitError(msg) + + +def __has_uv_run(cmd: str | Sequence) -> bool: + if isinstance(cmd, str): + return "uv run" in cmd + if isinstance(cmd, Sequence): + return any(__has_uv_run(part) for part in cmd) + return False + + +def _set_all_task(pyproject: ModifiablePyproject) -> None: + task_table = pyproject.get_table("tool.poe.tasks") + if "all" not in task_table: + return + all_task = cast("Table", task_table["all"]) + if any([ + __safe_update( + all_task, "help", "Run all continuous integration (CI) tasks locally" + ), + __safe_update(all_task, "ignore_fail", "return_non_zero"), + ]): + msg = f"Updated Poe the Poet all task in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def _set_jupyter_lab_task(pyproject: ModifiablePyproject) -> None: + tasks = pyproject.get_table("tool.poe.tasks") + existing = cast("Mapping", tasks.get("lab", {})) + expected = { + "args": to_toml_array([{"name": "paths", "default": "", "positional": True}]), + "cmd": "jupyter lab ${paths}", + "help": "Launch Jupyter Lab", + } + if isinstance(executor := existing.get("executor"), Mapping): + expected["executor"] = executor + elif "jupyter" in set(pyproject.get_table("dependency-groups", fallback=set())): + expected["executor"] = to_inline_table({"group": "jupyter"}) + if existing != expected: + tasks["lab"] = expected + msg = f"Set Poe the Poet jupyter task in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def _set_nb_task(pyproject: ModifiablePyproject) -> None: + tasks = pyproject.get_table("tool.poe.tasks") + existing = cast("Table", tasks.get("nb", {})) + expected = { + "args": to_toml_array([ + {"name": "paths", "default": "docs", "multiple": True, "positional": True} + ]), + "cmd": "pytest --nbmake --nbmake-timeout=0 ${paths}", + "help": "Run all notebooks", + } + executor = {} + if "notebooks" in pyproject.get_table("dependency-groups", fallback=set()): + executor["group"] = "notebooks" + executor["with"] = "nbmake" + expected["executor"] = to_inline_table(executor) + if existing != expected: + tasks["nb"] = expected + msg = f"Set Poe the Poet nb task in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def _set_test_all_task(pyproject: ModifiablePyproject) -> None: + supported_python_versions = pyproject.get_supported_python_versions() + if len(supported_python_versions) <= 1: + return + tasks = pyproject.get_table("tool.poe.tasks") + if "test" not in tasks: + return + if 
"test-py" in tasks: + del tasks["test-py"] + pyproject.changelog.append( + f"Removed deprecated Poe the Poet task test-py in {CONFIG_PATH.pyproject}" + ) + existing = { + name: task + for name, task in tasks.items() + if name == "test-all" or re.match(r"^test-py3\d+$", name) + } + expected = {} + expected["test-all"] = { + "help": "Run all tests on each supported Python version", + "sequence": to_toml_array([ + {"ref": f"test-py{version.replace('.', '')} ${{paths}}"} + for version in supported_python_versions + ]), + "args": [ + { + "default": "", + "multiple": True, + "name": "paths", + "positional": True, + } + ], + } + expected.update({ + f"test-py{version.replace('.', '')}": { + "env": to_inline_table({"UV_PYTHON": version}), + "ref": "test", + } + for version in supported_python_versions + }) + if existing != expected: + for name in existing: + del tasks[name] + for name, task in expected.items(): + tasks[name] = task + msg = f"Updated Poe the Poet test-all task in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def _update_doclive(pyproject: ModifiablePyproject) -> None: + def combine(key: str, value: str) -> str | Array: + existing_value = executor.get(key) + if existing_value is None or existing_value == value: + return value + if isinstance(existing_value, str): + existing_value = [existing_value] + return to_toml_array(sorted({*existing_value, value}), multiline=False) + + tasks = pyproject.get_table("tool.poe.tasks") + if "doclive" not in tasks: + return + doclive_task = cast("Table", tasks["doclive"]) + executor = cast("dict[str, Any]", doclive_task.get("executor", {})) + if "doc" in pyproject.get_table("dependency-groups", fallback=set()): + executor["group"] = combine("group", "doc") + if "sphinx-autobuild" in doclive_task.get("cmd", ""): + executor["with"] = combine("with", "sphinx-autobuild") + pyproject.remove_dependency("sphinx-autobuild") # cspell:ignore autobuild + if any([ + __safe_update(doclive_task, "executor", to_inline_table(executor)) + if executor + else False, + __safe_update( + doclive_task, + "help", + "Set up a server to directly preview changes to the HTML pages", + ), + ]): + msg = f"Updated Poe the Poet doclive task in {CONFIG_PATH.pyproject}" + pyproject.changelog.append(msg) + + +def __safe_update(table: MutableMapping, key: str, expected_value: Any) -> bool: + if table.get(key) != expected_value: + table[key] = expected_value + return True + return False diff --git a/src/compwa_policy/check_dev_files/pyproject.py b/src/compwa_policy/check_dev_files/pyproject.py index 8cb5fe3c..5c66a144 100644 --- a/src/compwa_policy/check_dev_files/pyproject.py +++ b/src/compwa_policy/check_dev_files/pyproject.py @@ -4,6 +4,7 @@ import os import re +from typing import TYPE_CHECKING from compwa_policy.utilities import CONFIG_PATH from compwa_policy.utilities.pyproject import ModifiablePyproject @@ -13,8 +14,11 @@ ) from compwa_policy.utilities.toml import to_toml_array +if TYPE_CHECKING: + from compwa_policy.config import PythonVersion -def main(excluded_python_versions: set[str]) -> None: + +def main(excluded_python_versions: set[PythonVersion]) -> None: if not CONFIG_PATH.pyproject.exists(): return with ModifiablePyproject.load() as pyproject: @@ -132,7 +136,7 @@ def _update_requires_python(pyproject: ModifiablePyproject) -> None: def _update_python_version_classifiers( - pyproject: ModifiablePyproject, excluded_python_versions: set[str] + pyproject: ModifiablePyproject, excluded_python_versions: set[PythonVersion] ) -> None: if not 
pyproject.has_table("project"):
         return
diff --git a/src/compwa_policy/check_dev_files/pytest.py b/src/compwa_policy/check_dev_files/pytest.py
index 398e76c9..b7e5b9b8 100644
--- a/src/compwa_policy/check_dev_files/pytest.py
+++ b/src/compwa_policy/check_dev_files/pytest.py
@@ -7,6 +7,7 @@
 import rtoml
 from ini2toml.api import Translator
 
+from compwa_policy.errors import PrecommitError
 from compwa_policy.utilities import CONFIG_PATH, vscode
 from compwa_policy.utilities.cfg import open_config
 from compwa_policy.utilities.executor import Executor
@@ -30,6 +31,7 @@ def main(single_threaded: bool) -> None:
         return
     do(_merge_coverage_into_pyproject, pyproject)
     do(_merge_pytest_into_pyproject, pyproject)
+    do(_deny_ini_options, pyproject)
     do(_update_codecov_settings, pyproject)
     do(_update_settings, pyproject)
     do(_update_vscode_settings, pyproject, single_threaded)
@@ -73,6 +75,23 @@ def _merge_pytest_into_pyproject(pyproject: ModifiablePyproject) -> None:
     pyproject.changelog.append(msg)
 
 
+def _deny_ini_options(pyproject: ModifiablePyproject) -> None:
+    if pyproject.has_table("tool.pytest.ini_options"):
+        msg = (
+            "pytest.ini_options found in pyproject.toml. Have a look at"
+            " https://docs.pytest.org/en/stable/reference/customize.html#pyproject-toml"
+            " to migrate to a native TOML configuration."
+        )
+        raise PrecommitError(msg)
+    pytest_config = pyproject.get_table("tool.pytest", fallback=None)
+    if pytest_config is None:
+        return
+    if "minversion" in pytest_config:  # cspell:ignore minversion
+        return
+    pytest_config["minversion"] = "9.0"
+    pyproject.changelog.append("Set minimum pytest version to 9.0")
+
+
 def _update_settings(pyproject: ModifiablePyproject) -> None:
     table_key = "tool.pytest.ini_options"
     if not pyproject.has_table(table_key):
diff --git a/src/compwa_policy/check_dev_files/readthedocs.py b/src/compwa_policy/check_dev_files/readthedocs.py
index 49231205..7b02fbe9 100644
--- a/src/compwa_policy/check_dev_files/readthedocs.py
+++ b/src/compwa_policy/check_dev_files/readthedocs.py
@@ -2,7 +2,9 @@
 
 from __future__ import annotations
 
+import os
 import re
+from functools import cache
 from pathlib import Path
 from textwrap import dedent, indent
 from typing import IO, TYPE_CHECKING, cast
@@ -11,7 +13,7 @@
 
 from compwa_policy.errors import PrecommitError
 from compwa_policy.utilities import CONFIG_PATH, get_nested_dict
-from compwa_policy.utilities.match import filter_files
+from compwa_policy.utilities.match import git_ls_files
 from compwa_policy.utilities.pyproject import (
     Pyproject,
     get_constraints_file,
@@ -59,12 +61,14 @@ def main(
 
 
 def _set_sphinx_configuration(config: ReadTheDocs) -> None:
-    if "sphinx" not in config.document:
-        config.document["sphinx"] = {}
-    sphinx = config.document["sphinx"]
     conf_path = __get_sphinx_config_path()
-    if "configuration" not in sphinx and conf_path:
-        sphinx["configuration"] = str(conf_path)
+    if conf_path is None:
+        return
+    conf_path = str(conf_path)
+    if config.document.get("sphinx", {}).get("configuration", "") != conf_path:
+        if "sphinx" not in config.document:
+            config.document["sphinx"] = {}
+        config.document["sphinx"]["configuration"] = conf_path
         msg = f"Set sphinx.configuration to {conf_path}"
         config.changelog.append(msg)
 
@@ -73,7 +77,7 @@ def __get_sphinx_config_path() -> Path | None:
     conf_path = Path("docs/conf.py")
     if conf_path.exists():
         return conf_path
-    candidate_paths = list(filter_files(["**/conf.py"]))
+    candidate_paths = git_ls_files("**/conf.py")
    if not candidate_paths:
         return None
     return Path(candidate_paths[0])
@@ -203,24 +207,20 @@ def __remove_nested_key(dct: dict, dotted_key: str) -> bool:
 def _update_build_step_for_pixi(config: ReadTheDocs) -> None:
     new_command = __get_pixi_install_statement() + "\n"
     pyproject = Pyproject.load()
+    docs_dir = _determine_docs_dir()
     if has_dependency(pyproject, "poethepoet"):
-        new_command += dedent(R"""
+        new_command += dedent(Rf"""
             export UV_LINK_MODE=copy
-            pixi run \
-              uv run \
-                --group doc \
-                --no-dev \
-                --with poethepoet \
-                poe doc
+            pixi run poe doc
             mkdir -p $READTHEDOCS_OUTPUT
-            mv docs/_build/html $READTHEDOCS_OUTPUT
+            mv {docs_dir}/_build/html $READTHEDOCS_OUTPUT
         """).strip()
     else:
-        new_command += dedent(R"""
+        new_command += dedent(Rf"""
            export UV_LINK_MODE=copy
            pixi run doc
            mkdir -p $READTHEDOCS_OUTPUT
-           mv docs/_build/html $READTHEDOCS_OUTPUT
+           mv {docs_dir}/_build/html $READTHEDOCS_OUTPUT
         """).strip()
     __update_build_step(
         config,
@@ -230,15 +230,12 @@ def _update_build_step_for_pixi(config: ReadTheDocs) -> None:
 
 
 def _update_build_step_for_uv(config: ReadTheDocs) -> None:
-    new_command = dedent(R"""
+    docs_dir = _determine_docs_dir()
+    new_command = dedent(Rf"""
         export UV_LINK_MODE=copy
-        uv run \
-          --group doc \
-          --no-dev \
-          --with poethepoet \
-          poe doc
+        uvx --from poethepoet poe doc
         mkdir -p $READTHEDOCS_OUTPUT
-        mv docs/_build/html $READTHEDOCS_OUTPUT
+        mv {docs_dir}/_build/html $READTHEDOCS_OUTPUT
     """).strip()
     __update_build_step(
         config,
@@ -252,6 +249,21 @@ def _update_build_step_for_uv(config: ReadTheDocs) -> None:
     )
 
 
+@cache
+def _determine_docs_dir() -> str:
+    for path in git_ls_files(
+        "conf.py",
+        "**/conf.py",
+        "_quarto.yml",
+        "**/_quarto.yml",
+        untracked=True,
+    ):
+        if os.path.isfile(path):
+            parent = os.path.dirname(path)
+            return parent or "."
+    return "docs"
+
+
 def __update_build_step(
     config: ReadTheDocs, new_command: str, search_function: Callable[[str], bool]
 ) -> None:
diff --git a/src/compwa_policy/check_dev_files/ruff.py b/src/compwa_policy/check_dev_files/ruff.py
index 37d2838d..832558c3 100644
--- a/src/compwa_policy/check_dev_files/ruff.py
+++ b/src/compwa_policy/check_dev_files/ruff.py
@@ -7,6 +7,7 @@
 from typing import TYPE_CHECKING, Any
 
 from ruamel.yaml import YAML
+from setuptools import find_packages
 
 from compwa_policy.utilities import natural_sorting, remove_configs, vscode
 from compwa_policy.utilities.executor import Executor
@@ -206,7 +207,7 @@ def _update_ruff_config(
         if has_notebooks:
             do(__update_flake8_builtins, pyproject)
             do(__update_flake8_comprehensions_builtins, pyproject)
-        do(__update_isort_settings, pyproject)
+        do(__update_isort_settings, pyproject, has_notebooks)
         do(__update_pydocstyle_settings, pyproject)
         do(__remove_nbqa, precommit, pyproject)
 
@@ -509,12 +510,15 @@ def __update_flake8_comprehensions_builtins(pyproject: ModifiablePyproject) -> N
     )
 
 
-def __update_isort_settings(pyproject: ModifiablePyproject) -> None:
-    ___update_ruff_lint_table(
-        pyproject,
-        table_name="isort",
-        minimal_settings={"split-on-trailing-comma": False},
-    )
+def __update_isort_settings(
+    pyproject: ModifiablePyproject, has_notebooks: bool
+) -> None:
+    packages_names = [mod for mod in find_packages("src") if "." not in mod]
+    minimal_settings: dict[str, Any] = {}
+    if has_notebooks and packages_names:
+        minimal_settings["known-first-party"] = packages_names
+    minimal_settings["split-on-trailing-comma"] = False
+    ___update_ruff_lint_table(pyproject, "isort", minimal_settings)
 
 
 def __update_pydocstyle_settings(pyproject: ModifiablePyproject) -> None:
diff --git a/src/compwa_policy/check_dev_files/ty.py b/src/compwa_policy/check_dev_files/ty.py
index 814973da..5b1efba4 100644
--- a/src/compwa_policy/check_dev_files/ty.py
+++ b/src/compwa_policy/check_dev_files/ty.py
@@ -43,7 +43,7 @@ def _update_vscode_settings(type_checkers: set[TypeChecker]) -> None:
     }
     if "ty" in type_checkers:
         if "pyright" not in type_checkers:
-            settings["python.languageServer"] = "None"
+            vscode.remove_settings(["python.languageServer"])
         vscode.add_extension_recommendation("astral-sh.ty")
     vscode.update_settings(settings)
     add_badge(
diff --git a/src/compwa_policy/check_dev_files/update_lock.py b/src/compwa_policy/check_dev_files/upgrade_lock.py
similarity index 64%
rename from src/compwa_policy/check_dev_files/update_lock.py
rename to src/compwa_policy/check_dev_files/upgrade_lock.py
index a39a79ac..423d0cf3 100644
--- a/src/compwa_policy/check_dev_files/update_lock.py
+++ b/src/compwa_policy/check_dev_files/upgrade_lock.py
@@ -9,6 +9,7 @@
 
 from typing import TYPE_CHECKING, Literal
 
+from compwa_policy.check_dev_files.dependabot import get_dependabot_ecosystems
 from compwa_policy.check_dev_files.github_workflows import (
     remove_workflow,
     update_workflow,
@@ -20,34 +21,28 @@ from compwa_policy.utilities.yaml import create_prettier_round_trip_yaml
 
 if TYPE_CHECKING:
-    from compwa_policy.utilities.precommit import Precommit
-
+    from compwa_policy.utilities.precommit import ModifiablePrecommit, Precommit
 
 Frequency = Literal[
-    "no",
-    "biweekly",
     "monthly",
-    "bimonthly",
     "quarterly",
-    "biannually",
-    "outsource",
+    "semiannually",
 ]
+"""The frequency of updating lock files."""
 
 __CRON_SCHEDULES: dict[Frequency, str] = {
-    "biweekly": "0 2 * * 1",
     "monthly": "0 3 7 */1 *",
-    "bimonthly": "0 3 7 */2 *",
     "quarterly": "0 3 7 */3 *",
-    "biannually": "0 3 7 */6 *",
+    "semiannually": "0 3 7 */6 *",
 }
+__TRIGGER_ECOSYSTEMS = {"julia", "uv"}
 
 
-def main(precommit: Precommit, frequency: Frequency) -> None:
+def main(precommit: ModifiablePrecommit, frequency: Frequency) -> None:
     with Executor() as do:
-        if frequency == "outsource":
-            do(_check_precommit_schedule, precommit)
+        do(_update_precommit_schedule, precommit, frequency)
        do(_remove_script, "pin_requirements.py")
        do(_remove_script, "upgrade.sh")
-       do(_update_requirement_workflow, frequency)
+       do(_update_requirement_workflow, precommit, frequency)
 
 
 def _remove_script(script_name: str) -> None:
@@ -58,7 +53,7 @@ def _remove_script(script_name: str) -> None:
         raise PrecommitError(msg)
 
 
-def _update_requirement_workflow(frequency: Frequency) -> None:
+def _update_requirement_workflow(precommit: Precommit, frequency: Frequency) -> None:
     def overwrite_workflow(workflow_file: str) -> None:
         expected_workflow_path = (
             COMPWA_POLICY_DIR / CONFIG_PATH.github_workflow_dir / workflow_file
@@ -74,7 +69,10 @@ def overwrite_workflow(workflow_file: str) -> None:
             )
             raise ValueError(msg)
         expected_data["on"]["pull_request"]["paths"] = existing_paths
-        if frequency == "outsource":
+        if (
+            get_dependabot_ecosystems() & __TRIGGER_ECOSYSTEMS
+            or "autoupdate_schedule" in precommit.document.get("ci", {})
+        ):
             del expected_data["on"]["schedule"]
         else:
             expected_data["on"]["schedule"][0]["cron"] = _to_cron_schedule(frequency)
@@ -99,12 +97,26 @@ def _to_cron_schedule(frequency: Frequency) -> str:
     return __CRON_SCHEDULES[frequency]
 
 
-def _check_precommit_schedule(precommit: Precommit) -> None:
-    schedule = precommit.document.get("ci", {}).get("autoupdate_schedule")
-    if schedule is None:
-        msg = (
-            "Cannot outsource pip constraints updates, because autoupdate_schedule has"
-            f" not been set under the ci key in {CONFIG_PATH.precommit}. See"
-            " https://pre-commit.ci/#configuration-autoupdate_schedule."
+def _update_precommit_schedule(
+    precommit: ModifiablePrecommit, frequency: Frequency
+) -> None:
+    ci_section = precommit.document.get("ci")
+    if ci_section is None:
+        return
+    key = "autoupdate_schedule"
+    if key not in ci_section:
+        return
+    if get_dependabot_ecosystems() & __TRIGGER_ECOSYSTEMS:
+        del ci_section[key]
+        precommit.changelog.append(
+            "Deactivated pre-commit autoupdate schedule, because it is already"
+            f" triggered by the {CONFIG_PATH.github_workflow_dir / 'lock.yml'}."
         )
-        raise PrecommitError(msg)
+    else:
+        if frequency == "semiannually":
+            frequency = "quarterly"
+        if ci_section[key] != frequency:
+            ci_section[key] = frequency
+            precommit.changelog.append(
+                f"Set pre-commit autoupdate schedule to {frequency!r}"
+            )
diff --git a/src/compwa_policy/check_dev_files/uv.py b/src/compwa_policy/check_dev_files/uv.py
index 9529810f..6ddc6282 100644
--- a/src/compwa_policy/check_dev_files/uv.py
+++ b/src/compwa_policy/check_dev_files/uv.py
@@ -2,7 +2,6 @@
 
 from __future__ import annotations
 
-from functools import cache
 from pathlib import Path
 from textwrap import dedent
 from typing import TYPE_CHECKING
@@ -13,7 +12,7 @@
 from compwa_policy.errors import PrecommitError
 from compwa_policy.utilities import COMPWA_POLICY_DIR, CONFIG_PATH, readme, vscode
 from compwa_policy.utilities.executor import Executor
-from compwa_policy.utilities.match import git_ls_files, matches_patterns
+from compwa_policy.utilities.match import is_committed
 from compwa_policy.utilities.precommit.struct import Hook, Repo
 from compwa_policy.utilities.pyproject import ModifiablePyproject, Pyproject
 from compwa_policy.utilities.pyproject.getters import has_sub_table
@@ -24,10 +23,11 @@
     from compwa_policy.utilities.pyproject.getters import PythonVersion
 
 
-def main(
+def main(  # noqa: PLR0917
+    precommit_config: ModifiablePrecommit,
     dev_python_version: PythonVersion,
+    keep_contributing_md: bool,
     package_manager: PackageManagerChoice,
-    precommit_config: ModifiablePrecommit,
     organization: str,
     repo_name: str,
 ) -> None:
@@ -41,7 +41,8 @@ def main(
         do(_update_editor_config)
         do(_update_python_version_file, dev_python_version)
         do(_update_uv_lock_hook, precommit_config)
-        do(_update_contributing_file, organization, repo_name)
+        if not keep_contributing_md:
+            do(_update_contributing_file, organization, repo_name)
         do(_remove_pip_constraint_files)
         do(
             vscode.remove_settings,
@@ -68,7 +69,7 @@ def main(
 
 
 def _hide_uv_lock_from_vscode_search() -> None:
-    if __has_uv_lock_file():
+    if is_committed("uv.lock"):
         vscode.update_settings({"search.exclude": {"**/uv.lock": True}})
 
 
@@ -110,7 +111,7 @@ def _remove_uv_lock() -> None:
 def _update_editor_config() -> None:
     if not CONFIG_PATH.editorconfig.exists():
         return
-    if not __has_uv_lock_file():
+    if not is_committed("uv.lock"):
         return
     expected_content = dedent("""
         [uv.lock]
@@ -124,6 +125,8 @@ def _update_editor_config() -> None:
 
 
 def _update_python_version_file(dev_python_version: PythonVersion) -> None:
+    if not CONFIG_PATH.pyproject.exists():
+        return
     pyproject = Pyproject.load()
     python_version_file = Path(".python-version")
     if pyproject.has_table("project"):
@@ -148,7 +151,7 @@ def _update_python_version_file(dev_python_version: PythonVersion) -> None:
 
 
 def _update_uv_lock_hook(precommit: ModifiablePrecommit) -> None:
-    if __has_uv_lock_file():
+    if is_committed("uv.lock"):
         repo = Repo(
             repo="https://github.com/astral-sh/uv-pre-commit",
             rev="0.4.20",
@@ -228,9 +231,3 @@ def __get_runner_instructions() -> str:
     if has_sub_table(pixi_config, "tasks"):
         return pixi_instructions
     return ""
-
-
-@cache
-def __has_uv_lock_file() -> bool:
-    files = git_ls_files(untracked=True)
-    return any(matches_patterns(file, ["uv.lock"]) for file in files)
diff --git a/src/compwa_policy/set_nb_display_name.py b/src/compwa_policy/set_nb_display_name.py
index 29118f15..f1cbf2de 100644
--- a/src/compwa_policy/set_nb_display_name.py
+++ b/src/compwa_policy/set_nb_display_name.py
@@ -10,7 +10,7 @@
 
 from compwa_policy.errors import PrecommitError
 from compwa_policy.utilities.executor import Executor
-from compwa_policy.utilities.match import filter_files
+from compwa_policy.utilities.match import git_ls_files
 from compwa_policy.utilities.notebook import load_notebook
 from compwa_policy.utilities.pyproject import Pyproject, has_dependency
 
@@ -38,7 +38,7 @@ def _set_nb_display_name(filename: str) -> None:
         .get("display_name")
     )
     expected_display_name = "Python 3 (ipykernel)"
-    if filter_files(["pyproject.toml"]):
+    if git_ls_files("**/pyproject.toml"):
         pyproject = Pyproject.load()
         if has_dependency(pyproject, "pyproject-local-kernel"):
             expected_display_name = "Pyproject Local"
diff --git a/src/compwa_policy/utilities/match.py b/src/compwa_policy/utilities/match.py
index 48368e9e..fb49414e 100644
--- a/src/compwa_policy/utilities/match.py
+++ b/src/compwa_policy/utilities/match.py
@@ -3,21 +3,13 @@
 from __future__ import annotations
 
 import subprocess  # noqa: S404
+from functools import cache
+from typing import TYPE_CHECKING
 
 from pathspec import PathSpec
 
-
-def filter_files(patterns: list[str], files: list[str] | None = None) -> list[str]:
-    """Filter filenames that match certain patterns.
-
-    If :code:`files` is not supplied, get the files with :func:`git_ls_files`.
-
-    >>> filter_files(["**/*.json", "**/*.txt"], ["a/b/file.json", "file.yaml"])
-    ['a/b/file.json']
-    """
-    if files is None:
-        files = git_ls_files(untracked=True)
-    return [file for file in files if matches_patterns(file, patterns)]
+if TYPE_CHECKING:
+    from collections.abc import Iterable
 
 
 def filter_patterns(patterns: list[str], files: list[str] | None = None) -> list[str]:
@@ -33,22 +25,23 @@ def filter_patterns(patterns: list[str], files: list[str] | None = None) -> list
     return [pattern for pattern in patterns if matches_files(pattern, files)]
 
 
-def git_ls_files(untracked: bool = False) -> list[str]:
+def git_ls_files(*glob: str, untracked: bool = False) -> list[str]:
     """Get the tracked and untracked files, but excluding files in .gitignore."""
-    output = subprocess.check_output([  # noqa: S607
-        "git",
-        "ls-files",
-    ]).decode("utf-8")
-    tracked_files = output.splitlines()
+    output = _git_ls_files_cmd(*glob, untracked=untracked)
+    return output.splitlines()
+
+
+def is_committed(*glob: str, untracked: bool = False) -> bool:
+    """Check if any files matching the given git wild-match patterns are committed."""
+    return bool(_git_ls_files_cmd(*glob, untracked=untracked))
+
+
+@cache
+def _git_ls_files_cmd(*glob: str, untracked: bool = False) -> str:
+    cmd = ["git", "ls-files", *glob]
     if untracked:
-        output = subprocess.check_output([  # noqa: S607
-            "git",
-            "ls-files",
-            "--others",
-            "--exclude-standard",
-        ]).decode("utf-8")
-        return tracked_files + output.splitlines()
-    return tracked_files
+        cmd.extend(["--cached", "--exclude-standard", "--others"])
+    return subprocess.check_output(cmd).decode("utf-8")  # noqa: S603
 
 
 def matches_files(pattern: str, files: list[str]) -> bool:
@@ -65,7 +58,7 @@ def matches_files(pattern: str, files: list[str]) -> bool:
     return any(spec.match_file(file) for file in files)
 
 
-def matches_patterns(filename: str, patterns: list[str]) -> bool:
+def matches_patterns(filename: str, patterns: Iterable[str]) -> bool:
     """Use git wild-match patterns to match a filename.
 
     >>> matches_patterns(".cspell.json", patterns=["**/*.json"])
diff --git a/src/compwa_policy/utilities/pyproject/__init__.py b/src/compwa_policy/utilities/pyproject/__init__.py
index 83a2407a..a5531558 100644
--- a/src/compwa_policy/utilities/pyproject/__init__.py
+++ b/src/compwa_policy/utilities/pyproject/__init__.py
@@ -67,11 +67,18 @@ def dumps(self) -> str:
         src = rtoml.dumps(self._document, pretty=True)
         return f"{src.strip()}\n"
 
-    def get_table(self, dotted_header: str, create: bool = False) -> Mapping[str, Any]:
+    def get_table(
+        self, dotted_header: str, *, create: bool = False, fallback: Any = None
+    ) -> Mapping[str, Any]:
         if create:
             msg = "Cannot create sub-tables in a read-only pyproject.toml"
             raise TypeError(msg)
-        return get_sub_table(self._document, dotted_header)
+        try:
+            return get_sub_table(self._document, dotted_header)
+        except KeyError as e:
+            if fallback is not None:
+                return fallback
+            raise e from e
 
     @final
     def has_table(self, dotted_header: str) -> bool:
@@ -184,12 +191,12 @@ def dump(self, target: IO | Path | str | None = None) -> None:
 
     @override
     def get_table(
-        self, dotted_header: str, create: bool = False
+        self, dotted_header: str, *, create: bool = False, fallback: Any = None
     ) -> MutableMapping[str, Any]:
         self.__assert_is_in_context()
         if create:
             create_sub_table(self._document, dotted_header)
-        return super().get_table(dotted_header)  # ty:ignore[invalid-return-type]
+        return super().get_table(dotted_header, fallback=fallback)  # ty:ignore[invalid-return-type]
 
     def add_dependency(
         self,
diff --git a/src/compwa_policy/utilities/pyproject/getters.py b/src/compwa_policy/utilities/pyproject/getters.py
index 51463462..3a745b95 100644
--- a/src/compwa_policy/utilities/pyproject/getters.py
+++ b/src/compwa_policy/utilities/pyproject/getters.py
@@ -129,7 +129,7 @@ def _get_requires_python(project: Mapping[str, Any]) -> str:
 
 
 def _get_allowed_versions(
-    version_range: str, exclude: set[str] | None = None
+    version_range: str, exclude: set[PythonVersion] | None = None
 ) -> list[PythonVersion]:
     """Get a list of allowed versions from a version range specifier.
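Aside (not part of the diff): a minimal usage sketch of the fallback keyword that the pyproject/__init__.py hunk above adds to get_table. The tool.pytest table name only mirrors its use in _deny_ini_options earlier in this diff; the surrounding script is hypothetical.

# Sketch only, assuming a repository that contains a pyproject.toml.
from compwa_policy.utilities.pyproject import Pyproject

pyproject = Pyproject.load()
# A non-None fallback is returned instead of raising KeyError for a missing table:
pytest_table = pyproject.get_table("tool.pytest", fallback={})
print(pytest_table.get("minversion", "not set"))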
diff --git a/src/compwa_policy/utilities/pyproject/setters.py b/src/compwa_policy/utilities/pyproject/setters.py
index 4b42bf5b..6419cbbb 100644
--- a/src/compwa_policy/utilities/pyproject/setters.py
+++ b/src/compwa_policy/utilities/pyproject/setters.py
@@ -209,8 +209,10 @@ def split_dependency_definition(definition: str) -> tuple[str, str, str]:
     ('my_package', '~=', '1.2')
     >>> split_dependency_definition("any_version_package==*")
     ('any_version_package', '==', '*')
+    >>> split_dependency_definition("python-lsp-server[rope]")
+    ('python-lsp-server[rope]', '', '')
     """
-    matches = re.match(r"^([a-zA-Z0-9_-]+)([\!<=>~\s]*)([^ ^#]*)", definition)
+    matches = re.match(r"^([a-zA-Z0-9_\[\]-]+)([\!<=>~\s]*)([^ ^#]*)", definition)
     if not matches:
         msg = f"Could not extract package name and version from {definition}"
         raise ValueError(msg)
diff --git a/src/compwa_policy/utilities/toml.py b/src/compwa_policy/utilities/toml.py
index 9f7df372..894071b6 100644
--- a/src/compwa_policy/utilities/toml.py
+++ b/src/compwa_policy/utilities/toml.py
@@ -5,10 +5,10 @@
 from typing import TYPE_CHECKING, Any
 
 import tomlkit
-from tomlkit.items import String, StringType, Trivia
+from tomlkit.items import InlineTable, String, StringType, Trivia
 
 if TYPE_CHECKING:
-    from collections.abc import Iterable
+    from collections.abc import Iterable, Mapping
 
     from tomlkit.items import Array
 
@@ -23,5 +23,12 @@ def to_toml_array(items: Iterable[Any], multiline: bool | None = None) -> Array:
     return array
 
 
+def to_inline_table(value: Mapping[str, Any]) -> InlineTable:
+    table = tomlkit.inline_table()
+    for key, val in value.items():
+        table[key] = val
+    return table
+
+
 def to_multiline_string(value: str) -> String:
     return String(StringType.MLB, value, value, Trivia())
diff --git a/src/compwa_policy/utilities/vscode.py b/src/compwa_policy/utilities/vscode.py
index 542bd3d1..d828a819 100644
--- a/src/compwa_policy/utilities/vscode.py
+++ b/src/compwa_policy/utilities/vscode.py
@@ -5,6 +5,7 @@
 import json
 from collections import abc
 from collections.abc import Iterable, Sized
+from functools import cache
 from typing import TYPE_CHECKING, Any, TypeVar
 
 from compwa_policy.errors import PrecommitError
@@ -24,10 +25,19 @@
 """Type for keys to be removed from a (nested) dictionary."""
 
 
+def get_recommended_extensions() -> set[str]:
+    return _get_extension_recommendations("recommendations")
+
+
 def get_unwanted_extensions() -> set[str]:
+    return _get_extension_recommendations("unwantedRecommendations")
+
+
+@cache
+def _get_extension_recommendations(key: str) -> set[str]:
     config = __load_config(CONFIG_PATH.vscode_extensions)
-    unwanted_extensions = config.get("unwantedRecommendations", set())
-    return {ext.lower() for ext in unwanted_extensions}
+    extensions = config.get(key, set())
+    return {ext.lower() for ext in extensions}
 
 
 def remove_settings(keys: RemovedKeys) -> None:
@@ -117,7 +127,7 @@ def _determine_new_value(old: V, new: V, sort: bool = False) -> V:
     if isinstance(old, dict) and isinstance(new, dict):
         return _update_dict_recursively(old, new, sort)  # ty:ignore[invalid-return-type]
     if isinstance(old, list) and isinstance(new, list):
-        return sorted({*old, *new})  # ty:ignore[invalid-return-type]
+        return sorted({*old, *new})  # ty:ignore[invalid-argument-type]
     return new
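Aside (not part of the diff): an illustrative sketch of the to_inline_table helper introduced in utilities/toml.py above. The package name and values below are made up.

# Sketch only: turn a plain mapping into a tomlkit inline table.
import tomlkit

from compwa_policy.utilities.toml import to_inline_table

document = tomlkit.document()
document["my-package"] = to_inline_table({"version": "1.0", "extras": ["doc"]})
# Dumps on a single line, roughly: my-package = {version = "1.0", extras = ["doc"]}
print(tomlkit.dumps(document))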