diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a4ae49e9..6ac73dff 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,9 +1,9 @@ [bumpversion] -current_version = 4.3.2 +current_version = 0.2.0 commit = True tag = True parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z]+)? -serialize = +serialize = {major}.{minor}.{patch}{releaselevel} {major}.{minor}.{patch} @@ -12,4 +12,3 @@ serialize = [bumpversion:file:docs/includes/introduction.txt] [bumpversion:file:README.rst] - diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 54f27622..aca083c3 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -26,4 +26,3 @@ Paste the full traceback (if there is any) * Python version * Mode version * Operating system - diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml new file mode 100644 index 00000000..f44de8d1 --- /dev/null +++ b/.github/workflows/dist.yml @@ -0,0 +1,70 @@ +# vim:ts=2:sw=2:et:ai:sts=2 +name: 'Build distribution' + +on: + # Only run when release is created in the master branch + release: + types: [created] + branches: + - 'master' + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-11] + + steps: + - uses: actions/checkout@v3 + + - name: Build wheels + uses: pypa/cibuildwheel@v2.8.1 + env: + CIBW_MANYLINUX_X86_64_IMAGE: 'manylinux2014' + CIBW_ARCHS: auto64 + CIBW_BUILD: 'cp3*' + CIBW_SKIP: '*p36*' + CIBW_BEFORE_BUILD: pip3 install Cython + + - uses: actions/upload-artifact@v3 + with: + path: ./wheelhouse/*.whl + + build_sdist: + name: 'Build source distribution' + runs-on: 'ubuntu-latest' + steps: + - uses: actions/checkout@v2 + name: 'Checkout source repository' + + - uses: actions/setup-python@v4 + + - name: 'Build sdist' + run: > + pip3 install pkgconfig cython --upgrade && + python3 setup.py sdist + + - uses: actions/upload-artifact@v2 + name: 'Upload build artifacts' + with: + path: 'dist/*.tar.gz' + + 
upload_pypi: + name: 'Upload packages' + needs: ['build_wheels', 'build_sdist'] + runs-on: 'ubuntu-latest' + if: github.event_name == 'release' && github.event.action == 'created' + steps: + - uses: actions/download-artifact@v3 + name: 'Download artifacts' + with: + name: 'artifact' + path: 'dist' + + - uses: pypa/gh-action-pypi-publish@release/v1 + name: "Publish package to PyPI" + with: + user: '__token__' + password: '${{ secrets.PYPI_API_TOKEN }}' diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml new file mode 100644 index 00000000..d4fca155 --- /dev/null +++ b/.github/workflows/gh-pages.yml @@ -0,0 +1,39 @@ +name: Pages + +on: + push: + branches: + - master + +jobs: + build: + name: "Build docs" + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v4 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # otherwise, you will failed to push refs to dest repo + - name: "Install deps and build with Sphinx" + run: make docs + - name: "Upload artifacts" + uses: actions/upload-pages-artifact@v1 + with: + # Upload built docs + path: "./Documentation" + deploy: + name: "Deploy docs" + needs: build + runs-on: ubuntu-latest + # Grant GITHUB_TOKEN the permissions required to make a Pages deployment + permissions: + pages: write # to deploy to Pages + id-token: write # to verify the deployment originates from an appropriate source + # Deploy to the github-pages environment + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - uses: actions/deploy-pages@v1 + id: deployment + name: "Deploy to GitHub Pages" diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml new file mode 100644 index 00000000..b3552d77 --- /dev/null +++ b/.github/workflows/python-package.yml @@ -0,0 +1,39 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: 
https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Python package + +on: + push: + branches: ["master"] + pull_request: + branches: ["master"] + +jobs: + tests: + name: "Python ${{ matrix.python-version }}/Cython: ${{ matrix.use-cython }}" + runs-on: "ubuntu-latest" + + strategy: + # Complete all jobs even if one fails, allows us to see + # for example if a test fails only when Cython is enabled + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + use-cython: ["true", "false"] + env: + USE_CYTHON: ${{ matrix.use-cython }} + + steps: + - uses: "actions/checkout@v2" + - uses: "actions/setup-python@v2" + with: + python-version: "${{ matrix.python-version }}" + - name: "Install dependencies" + run: "scripts/install" + - name: "Run linting checks" + run: "scripts/check" + - name: "Run tests" + run: "scripts/tests" + - name: "Enforce coverage" + run: "scripts/coverage" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..25e434e3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,30 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + + - repo: https://github.com/ambv/black + rev: 21.9b0 + hooks: + - id: black + + - repo: https://github.com/pycqa/isort + rev: 5.9.3 + hooks: + - id: isort + name: isort (python) + + - repo: local + hooks: + - id: flake8 + name: flake8 + stages: [commit] + language: python + entry: flake8 + types: [python] diff --git a/.travis.yml b/.travis.yml index 9a4dca81..2563f7ad 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,30 +12,30 @@ stages: matrix: include: - - python: 3.8.0 + - python: 3.8.6 env: TOXENV=flake8 os: linux - dist: xenial + dist: bionic stage: lint - - 
python: 3.8.0 + - python: 3.8.6 env: TOXENV=typecheck os: linux - dist: xenial + dist: bionic stage: lint - - python: 3.8.0 + - python: 3.8.6 env: TOXENV=apicheck os: linux - dist: xenial + dist: bionic stage: lint - - python: 3.8.0 + - python: 3.8.6 env: TOXENV=docstyle os: linux - dist: xenial + dist: bionic stage: lint - - python: 3.8.0 + - python: 3.8.6 env: TOXENV=bandit os: linux - dist: xenial + dist: bionic stage: lint - python: 3.6.3 env: TOXENV=3.6 RUN_SUITE=y @@ -105,7 +105,54 @@ matrix: - python: 3.8.0 env: TOXENV=3.8 IDENT="3.8.0" RUN_SUITE=y os: linux - dist: xenial + dist: focal + stage: test + - python: 3.8.1 + env: TOXENV=3.8 IDENT="3.8.1" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.8.2 + env: TOXENV=3.8 IDENT="3.8.2" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.8.3 + env: TOXENV=3.8 IDENT="3.8.3" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.8.4 + env: TOXENV=3.8 IDENT="3.8.4" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.8.5 + env: TOXENV=3.8 IDENT="3.8.5" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.8.6 + env: TOXENV=3.8 IDENT="3.8.6" RUN_SUITE=y + os: linux + dist: focal + stage: test + - python: 3.9.0 + env: TOXENV=3.9 IDENT="3.9.0" RUN_SUITE=y + os: linux + dist: focal + stage: test + - name: Latest python-3.9 + python: 3.9 + env: TOXENV=3.9 IDENT="3.9" RUN_SUITE=y + os: linux + dist: bionic + stage: test + - name: Latest python-3.10 + python: 3.10.0 + env: TOXENV=3.10 IDENT="3.10" RUN_SUITE=y + os: linux + dist: bionic stage: test before_install: diff --git a/CODE_OF_CONDUCT.rst b/CODE_OF_CONDUCT.rst index d59514a5..7141c712 100644 --- a/CODE_OF_CONDUCT.rst +++ b/CODE_OF_CONDUCT.rst @@ -41,4 +41,3 @@ reported by opening an issue or contacting one or more of the project maintainer This Code of Conduct is adapted from the Contributor Covenant, version 1.2.0 available at http://contributor-covenant.org/version/1/2/0/. 
- diff --git a/Changelog b/Changelog index eef67d15..234e0da0 100644 --- a/Changelog +++ b/Changelog @@ -4,1879 +4,32 @@ Change history ================ -.. version-4.4.0: +.. version-0.2.0: -4.4.0 +0.2.0 ===== -:release-date: 2020-09-28 11:00 A.M PST -:release-by: Ask Solem (:github_user:`ask`) +:release-date: 2021-10-14 +:release-by: Taybin Rutkin (:github_user:`taybin`) -- Supervisors now raises :exc:`SystemExit` on max restarts exceeded. +- Support python-3.10 -- Queue: Now includes high pressure and pressure drop conditions. +- format with black and isort - This means you can now add callbacks when the queue is under high - pressure, and for when the pressure drops again. +- add crontab timer from Faust (:github_user:`lqhuang`) -- Service: Added ``human_tracebacks()`` method. +.. version-0.1.0: - -.. _version-4.3.2: - -4.3.2 -===== -:release-date: 2020-02-13 3:21 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Tracebacks: Added support for async generators. - - Also added new functions: - - + :func:`~mode.utils.tracebacks.format_agen_stack` - + :func:`~mode.utils.tracebacks.print_agen_stack` - -- Logging: New version of log argument formatters. - - The existing log argument formatter callbacks did - not have access to the original log record, so could - not make decisions based on matching the log message string - for example. - - A new :func:`~mode.utils.logging.formatter2` decorator - has been added that registers callbacks taking two arguments. - - Example: - - .. sourcecode:: python - - from mode.utils.logging import formatter2 - - @formatter2 - def format_log_argument(arg: Any, record: logging.LogRecord): - # improve aiokafka logging to sort the list - # of topics logged when subscribing to topics - # and make it more human readable - if record.msg.startswith('Subscribing to topics'): - if isinstance(arg, frozenset): - return ', '.join(sorted(arg)) - -.. 
_version-4.3.1: - -4.3.1 -===== -:release-date: 2020-02-10 2:40 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Added :func:`mode.utils.times.humanize_seconds_ago` - - This formats seconds float to text "n seconds ago", - or "just now" if less than one second. - -- Added :func:`mode.utils.text.enumeration` - - This formats a list of strings to a enumerated list, - for example: - - .. sourcecode:: pycon - - >>> text.enumeration(['x', 'y', '...']) - "1) x\n2) y\n3) ..." - -.. _version-4.3.0: - -4.3.0 -===== -:release-date: 2020-01-22 2:25 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Threads: Now runs with :envvar:`PYTHONASYNCIODEBUG`. - -- Threads: Fixed race condition where pending methods are lost - -- utils.tracebacks: Adds ``print_coro_stack`` and - ``format_coro_stack`` for debugging coroutines. - -- Timers: Adds sleeptime and runtime to logs for more info. - -- Threads: Threads now do two-way keepalive (thread -> parent, parent -> thread) - -- Service: ``add_future`` now sets task name - -- Worker: ``SIGUSR2`` now starts :mod:`pdb` session. - -.. _version-4.2.0: - -4.2.0 -===== -:release-date: 2020-01-15 5:00 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Timers: Exclude timer callback time in drift calculation - -- Service: `maybe_start()` now returns bool, and :const:`True` - if the service was started. - -.. _version-4.1.9: - -4.1.9 -===== -:release-date: 2020-01-13 11:18 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- QueueServiceThread: Stop method queue before stopping child services. - -- Small fixes to render graph images correctly. - -.. _version-4.1.8: - -4.1.8 -===== -:release-date: 2020-01-07 4:00 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- ``ServiceThread.crash()`` now immediately sets exception - when called in main thread. - -.. 
_version-4.1.7: - -4.1.7 -===== -:release-date: 2020-01-07 3:20 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Now depends on :pypi:`typing_extensions` (Issue #53) - - This dependency was previously missing resulting in errors. - -- Fixed :class:`~mode.threads.ServiceThread` hang on exceptions raised - (Issue #54). - - Contributed by Alexey Basov (:github_user:`r313pp`) - and Jonathan Booth (:github_user:`jbooth-mastery`). - - - -.. _version-4.1.6: - -4.1.6 -===== -:release-date: 2019-12-12 2:30 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- logging: Fixes recursion error on Python 3.6 (Issue #52) - -- Makefile: Added `make develop` to install dependencies into the currently - activated virtualenv. - -- Tests: Removed `was never awaited` warning during test run. - -- CI: Revert back to using Python 3.7.5 for Windows build. - -.. _version-4.1.5: - -4.1.5 -===== -:release-date: 2019-12-11 3:45 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Tests passing on Python 3.8.0 - -- Fixed typing related issues. - -- Added :file:`__init__.py` to `mode.utils` package. - -.. _version-4.1.4: - -4.1.4 -===== -:release-date: 2019-10-30 3:23 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Timers: Do not log drift for subsecond interval timers. - -.. _version-4.1.3: - -4.1.3 -===== -:release-date: 2019-10-29 2:14 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Service: Ignore :exc:`asyncio.CancelledError` when a child service - is stopped. - -- flight recorder: Adds ability to add logging extra-data that - propagates to child recorders. - - -.. _version-4.1.2: - -4.1.2 -===== -:release-date: 2019-10-02 3:41 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds easy flight recorder interface. - - Instead of passing flight recorders around, we now have the concept - of a ``current_flight_recorder``. - - There is also a new ``on_timeout`` wrapper object that always - logs to the current flight recorder: - - .. 
sourcecode:: python - - from mode.utils.logging import flight_recorder, on_timeout - - async def main(): - with flight_recorder(logger, timeout=300): - some_function() - - def some_function(): - on_timeout.error('Fetching data') - fetch_data() - - on_timeout.error('Processing data') - process_data() - - This uses a :class:`~mode.utils.locals.LocalStack`, - so flight recorders can be arbitrarily nested. - Once a flight recorder context exits, the previously active flight - recorder will be set active again. - -- Logging: Default logging format now includes process PID. - -- Adds :class:`~mode.utils.collections.Heap` as generic interface - to the heapq module - -.. _version-4.1.1: - -4.1.1 -===== -:release-date: 2019-10-02 3:41 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- ``is_optional``: Now works with union having multiple args on Python 3.6. - -- ``Proxy``: Due to `Python issue #29581`_ the ``source`` class option - to :class:`~mode.locals.Proxy` cannot be used on Python 3.6. - - To work around this when support for 3.6 is a requirement - you can now use a ``__proxy_source__`` class attribute instead: - - .. sourcecode:: python - - class MappingProxy(Proxy[Mapping]): - proxy_source__ = Mapping - - you want to support 3.7 and up you can continue to use the class syntax: - - .. sourcecode:: python - - class MappingProxy(Proxy[Mapping], source=Mapping): - ... - -.. _`Python issue #29581`: https://bugs.python.org/issue29581 - -.. _version-4.1.0: - -4.1.0 -===== -:release-date: 2019-09-27 2:00 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Worker: Adds ``override_logging`` option to use your own logging - setup (Issue #46). - -- Service: Adds ``Service.crash_reason`` (Issue #50). - -- Service: Adds ``remove_dependency`` to disable dependencies at runtime - (Issue #49). - - Contributed by Martin Maillard. - -- Timers: Increase max drift to silence noisy logs. - -- Proxy: Adds support for lazy proxying of objects. - - See :mod:`mode.locals`. 
- -- Documentation improvements by: - - + :github_user:`tojkamaster`. - - + :github_user:`casio` - -.. _version-4.0.1: - -4.0.1 -===== -:release-date: 2019-07-17 3:42 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Utils: :class:`~mode.utils.future.stampede` objects can now be - read by :pypi:`Sphinx` and :func:`inspect.signature`. - -- CI: Adds CPython 3.7.3 to build matrix, and set as default for lint stages - -.. _version-4.0.0: - -4.0.0 -===== -:release-date: 2019-05-07 2:21 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- 100% Test Coverage - -- Fixed several edge case bugs that were never reported. - -.. _version-3.2.2: - -3.2.2 -===== -:release-date: 2019-04-07 6:18 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- :class:`~mode.utils.typing.AsyncGenerator` takes two arguments. - - Mistakenly had it take three arguments, like :class:`typing.Generator`.S - -.. _version-3.2.1: - -3.2.1 -===== -:release-date: 2019-04-07 4:07 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds :class:`mode.utils.typing.AsyncGenerator` - to import :class:`typing.AsyncGenerator` missing from Python 3.6.0. - -.. _version-3.2.0: - -3.2.0 -===== -:release-date: 2019-04-06 11:00 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds ``Service.itertimer``: used to perform periodic - tasks, but with automatic drift adjustment. - -- Adds :func:`mode.utils.mocks.ContextMock` - - To mock a regular context manager. - -.. _version-3.1.3: - -3.1.3 -===== -:release-date: 2019-04-04 08:41 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- ``mode.utils.worker.exiting`` now takes option to print exceptions. - -- Threads: Method queue "starting..." logs now logged with debug severity. - -- Worker: execute_from_commandline no longer swallow errors if loop closed. - -- Adds :class:`mode.locals.LocalStack`. - -.. 
_version-3.1.2: - -3.1.2 -===== -:release-date: 2019-04-04 08:37 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -+ **Revoked release**: Version without changelog entry was uploaded to PyPI. - Please upgrade to 3.1.3. - -.. _version-3.1.1: - -3.1.1 -===== -:release-date: 2019-03-27 10:02 A.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Service: property ``should_stop`` is now true if service crashed. - -- Timers: Avoid drift + introduce a tiny bit of drift to timers. - - Thanks to Bob Haddleton (:github_user:`bobh66`) for discovering - this issue. - -.. _version-3.1.0: - -3.1.0 -===== -:release-date: 2019-03-21 03:26 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds :class:`~mode.utils.contexts.nullcontext` - and :class:`~mode.utils.contexts.asyncnullcontext`. - - Backported from Python 3.7 you can import these - from :mod:`mode.utils.contexts`. - -- Mode project changes: - - + Added :pypi:`bandit` to CI lint build. - - + Added :pypi:`pydocstyle` to CI lint build. - -.. _version-3.0.13: - -3.0.13 -====== -:release-date: 2019-03-20 04:58 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds ``CompositeLogger.warning`` alias to ``warn``. - - :pypi:`flake8-logging-format` has a rule that says - you are only allowed to use ``.warning``, so going with that. - -.. _version-3.0.12: - -3.0.12 -====== -:release-date: 2019-03-20 03:23 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds :func:`~mode.utils.futures.all_tasks` as a backward compatible - :func:`asyncio.all_tasks`. - -- Signal: Fixes ``.connect()`` decorator to work with parens and without - - Signal decorator now works with parens: - - .. sourcecode:: python - - @signal.connect() - def my_handler(sender, **kwargs): - ... - - and without parens: - - .. sourcecode:: python - - @signal.connect - def my_handler(sender, **kwargs): - ... - -- Signal: Do not use weakref by default. 
- - Using weakref by default meant it was too easy to connect - a signal handler to only have it disappear because there were - no more references to the object. - -.. _version-3.0.11: - -3.0.11 -====== -:release-date: 2019-03-19 08:50 A.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Adds ThrowableQueue._throw() for non-async version of .throw(). - -.. _version-3.0.10: - -3.0.10 -====== -:release-date: 2019-03-14 03:55 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Worker: was giving successful exit code when an exception is raised. - - The ``.execute_from_commandline`` method now always exits - (its return type is :class:`typing.NoReturn`). - -- Adds :class:`~mode.utils.compat.NoReturn` to :mod:`mode.utils.compat`. - - Import :class:`typing.NoReturn` from here to support Python versions - before 3.6.3. - -.. _version-3.0.9: - -3.0.9 -===== -:release-date: 2019-03-08 01:20 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Threads: Add multiple workers for thread method queue. - - Default number of workers is now 2, to allow for - two recursive calls. - -- Signal: Use `Signal.label` instead of ``.indent`` to be more consistent. - -- Signal: Properly set ``.name`` when signal is member of class. - -- Adds ability to log the FULL traceback of :class:`asyncio.Task`. - -- Service: Stop faster if stopped immediately after start - -- Service: Correctly track dependencies for services added - using ``Service.on_init_dependencies`` (Issue #40). - - Contributed by Nimi Wariboko Jr (:github_user:`nemosupremo`). - -.. _version-3.0.8: - -3.0.8 -===== -:release-date: 2019-01-25 03:54 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Fixes ``DeprecationWarning`` importing from ``collections``. - -- stampede: Fixed edge case where stampede wrapped function - called multiple times. - - Calling the same stampede wrapped function multiple times - within the same event loop iteration would previously call - the function multiple times. 
- - For example using :func:`asyncio.gather`: - - .. sourcecode:: python - - from mode.utils.futures import stampede - - count = 0 - @stampede - async def update_count(): - global count - count += 1 - - async def main(): - await asyncio.gather( - update_count(), - update_count(), - update_count(), - update_count(), - ) - - assert count == 1 - - Previously this would call the function four times, but with the - fix it's only called once and provides the expected result. - -- Mocks: Adds :func:`~mode.utils.mocks.mask_module` and - :func:`~mode.utils.mocks.patch_module`. - -- CI: Added Windows build. - -- CI: Enabled random order for tests. - - -.. _version-3.0.7: - -3.0.7 -===== -:release-date: 2019-01-18 01:12 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- **ServiceThread** ``.stop()`` would wait for thread shutdown - even if thread was never started. - -- **CI**: Adds CPython 3.7.2 and 3.6.8 to build matrix - -.. _version-3.0.6: - -3.0.6 -===== -:release-date: 2019-01-07 12:10 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Adds ``%(extra)s`` as log format option. - - To add additional context to your logging statements use for example:: - - logger.error('Foo', extra={'data': {'database': 'db1'}}) - -.. _version-3.0.5: - -3.0.5 -===== -:release-date: 2018-12-19 04:40 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Fixes compatibility with :pypi:`colorlog` 4.0.x. - - Contributed by Ryan Whitten (:github_user:`rwhitten577`). - -.. _version-3.0.4: - -3.0.4 -===== -:release-date: 2018-12-07 04:40 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Now depends on :pypi:`mypy_extensions`. - -.. _version-3.0.3: - -3.0.3 -===== -:release-date: 2018-12-07 3:22 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Threads: Fixed delay in shutdown if ``on_thread_stop`` callback raises - exception. - -- Service: Stopping of children no longer propagates exceptions, to ensure - other services are still stopped. 
- -- Worker: Fixed race condition if worker stopped before being fully started. - - This would lead the worker to shutdown early before fully stopping - all dependent services. - -- Tests: Adds :class:`~mode.utils.mocks.AsyncMagicMock` - -.. _version-3.0.2: - -3.0.2 -===== -:release-date: 2018-12-07 1:14 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Worker: Fixes crash on Windows where signal handlers cannot be registered. - -- Utils: Adds :func:`~mode.utils.objects.shortname` to get non-qualified - object path. - -- Utils: Adds :func:`~mode.utils.objects.canonshortname` to get non-qualified - object path that attempts to resolve the real name of ``__main__``. - -.. _version-3.0.1: - -3.0.1 -===== -:release-date: 2018-12-06 10:20 A.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Worker: Added new callback ``on_worker_shutdown``. - -- Worker: Do not stop twice, instead wait for original stop to complete. - - Signals would start multiple stopping coroutines, leading to - the worker shutting down too fast. - -- Threads: All ``ServiceThread`` services needs a keepalive - coroutine to be scheduled. - -- Supervisor: Fixed issue with ``CrashingSupervisor`` where - service would not crash. - -.. _version-3.0.0: - -3.0.0 -===== -:release-date: 2018-11-30 4:48 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- ``ServiceThread`` no longer uses ``run_in_executor``. - - Since services are long running, it is not a good idea for - them to block pool worker threads. Instead we run one - thread for every ServiceThread. - -- Adds :class:`~mode.threads.QueuedServiceThread` - - This subclass of :class:`~mode.threads.ServiceThread` enables - the use of a queue to send work to the service thread. - - This is useful for services that wrap blocking network clients - for example. - - If you have a blocking Redis client you could run it in a separate - thread like this: - - .. 
code-block:: python - - class Redis(QueuedServiceThread): - _client: StrictRedis = None - - async def on_start(self) -> None: - self._client = StrictRedis() - - async def get(self, key): - return await self.call_thread(self._client.get, key) - - async def set(self, key, value): - await self.call_thread(self._client.set, key, value) - - The actual redis client will be running in a separate thread (with a - separate event loop). The ``get`` and ``set`` methods will delegate - to the thread, and return only when the thread is finished handling - them and is ready with a result: - - .. sourcecode:: python - - async def use_redis(): - # We use async-with-statement here, but - # can also do `await redis.start()` then `await redis.stop()` - async with Redis() as redis: - await redis.set(key='foo', value='bar') - assert await redis.get(key='foo') == 'bar' - -- Collections: ``FastUserSet`` and ``ManagedUserSet`` now implements - all :class:`set` operations. - -- Collections are now generic types. - - You can now subclass collections with typing information: - - - ``class X(FastUserDict[str, int]): ...`` - - ``class X(ManagedUserDict[str, int]): ...`` - - ``class X(FastUserSet[str]): ...`` - - ``class X(ManagedUserSet[str]): ...`` - -- :func:`~mode.utils.futures.maybe_async` utility now - also works with ``@asyncio.coroutine`` decorated coroutines. - -- Worker: SIGUSR1 cry handler: Fixed crash when coroutine does not have - ``__name__`` attribute. - -.. _version-2.0.4: - -2.0.4 -===== -:release-date: 2018-11-19 1:07 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- ``FlowControlQueue.clear`` now cancels all waiting for ``Queue.put``. - -.. _version-2.0.3: - -2.0.3 -===== -:release-date: 2018-11-05 5:20 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Adds `Service.wait_first(*coros)` - - Wait for the first coroutine to return, where coroutines can also - be :class:`asyncio.Event`. 
- - Returns :class:`mode.services.WaitResults` with fields: - - - ``.done`` - List of arguments that are now done. - - ``.results`` - List of return values in order of ``.done``. - - ``.stopped`` - Set to True if the service was stopped. - -.. _version-2.0.2: - -2.0.2 -===== -:release-date: 2018-11-03 9:07 A.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Now depends on :pypi:`aiocontextvars` 0.2 - - This release uses :pep:`508` syntax for conditional requirements, - as :ref:`version-2.0.1` did not work when installing wheel. - -.. _version-2.0.1: - -2.0.1 -===== -:release-date: 2018-11-02 7:38 P.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Now depends on :pypi:`aiocontextvars` 0.2 - -.. _version-2.0.0: - -2.0.0 -===== -:release-date: 2018-11-02 9:12 A.M PST -:release-by: Ask Solem (:github_user:`ask`) - -- Services now create the event loop on demand. - - This means the event loop is no longer created in `Service.__init__` - so that services can be defined at module scope without initializing - the loop. - - This makes the ``ServiceProxy`` pattern redundant for most use cases. - -- Adds ``.utils.compat.current_task`` as alias for - :mod:`asyncio.current_task`. - -- Adds support for contextvars in Python 3.6 using :pypi:`aiocontextvars`. - - In mode services you can now use :mod:`contextvars` module even - on Python 3.6, thanks to the work of :github_user:`fantix`. - -.. _version-1.18.2: - -1.18.2 -====== -:release-date: 2018-11-30 6:23 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Worker: SIGUSR1 cry handler: Fixed crash when coroutine does not have - ``__name__`` attribute. - -.. _version-1.18.1: - -1.18.1 -====== -:release-date: 2018-10-03 2:49 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- **Service**: ``Service.from_awaitable(coro)`` improvements. - - The resulting ``service.start`` will now: - - + Convert awaitable to :class:`asyncio.Task`. - + Wait for task to complete. 
- - then ``service.stop`` will: - - + Cancel the task. - - This ensures an ``asyncio.sleep(10.0)`` within can be - cancelled. If you need some operation to absolutely finish you must - use `asyncio.shield`. - -- **Utils**: ``cached_property`` adds new ``.is_set(o)`` method on descriptor - - This can be used to test for the attribute having been cached/used. - - If you have a class with a ``cached_property``: - - .. sourcecode:: python - - from mode.utils.objects import cached_property - - class X: - - @cached_property - def foo(self): - return 42 - - x = X() - print(x.foo) - - From an instance you can now check if the property was accessed: - - .. sourcecode:: python - - if type(x).foo.is_set(x): - print(f'Someone accessed x.foo and it was cached as: {x.foo}') - -.. _version-1.18.0: - -1.18.0 -====== -:release-date: 2018-10-02 3:32 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- **Worker**: Fixed error when starting :pypi:`aioconsole` on ``--debug`` - - The worker would crash with: - - .. sourcecode:: text - - TypeError: Use `self.add_context(ctx)` for non-async context - - when started with the ``--debug`` flag. - -- **Worker**: New ``daemon`` argument controls shutdown of worker. - - When the flag is enabled, the default, the worker will not shut - down until the worker instance is either explicitly stopped, or - it receives a terminating process signal (``SIGINT``/``SIGTERM``/etc.) - - When disabled, the worker for the given service will shut down as soon as - ``await service.start()`` returns. - - You can think of it as a flag for daemons, but one that doesn't actually - do any of the UNIX daemonization stuff (detaching, etc.). It merely - means the worker continues to run in the background until stopped by - signal. - -- **Service**: Added class method: ``Service.from_awaitable``. - - This can be used to create a service out of any coroutine - or :class:`~typing.Awaitable`: - - .. 
sourcecode:: python - - from mode import Service, Worker - - async def me(interval=1.0): - print('ME STARTING') - await asyncio.sleep(interval) - print('ME STOPPING') - - def run_worker(interval=1.0): - coro = me(interval=1.0) - Worker(Service.from_awaitable(coro)).execute_from_commandline() - - if __name__ == '__main__': - run_worker() - - .. note:: - - Using a service with ``await self.sleep(1.0)`` is often not what - you want, as stopping the service will have to wait for the sleep - to finish. - - ``Service.from_awaitable`` is as such a last resort for cases - where you're provided a coroutine you cannot implement as a service. - - ``Service.sleep()`` is useful as it will stop sleeping immediately - if the service is stopped: - - class Me(Service): - - async def on_start(self) -> None: - await self.sleep(1.0) - -- **Service**: New method ``_repr_name`` can be used to override the service - class name used in ``repr(service)``. - -.. _version-1.17.3: - -1.17.3 -====== -:release-date: 2018-09-18 4:00 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Service: New attribute ``mundane_level`` decides the logging level - of mundane logging events such as "[X] Starting...", for starting/stopping - and tasks being cancelled. - - The value for this must be a logger level name, and is ``"info"`` by - default. - - If logging for a service is noisy at info-level, you can move it - to debug level by setting this attribute to ``"debug"``: - - .. sourcecode:: python - - class X(Service): - mundane_level = 'debug' - -.. _version-1.17.2: - -1.17.2 -====== -:release-date: 2018-09-17 3:00 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Removed and fixed import from ``collections`` that will be moved - to ``collections.abc`` in Python 3.8. - - This also silences a ``DeprecationWarning`` that was being emitted - on Python 3.7. - -- Type annotations now passing checks on :pypi:`mypy` 0.630. - -.. 
_version-1.17.1: - -1.17.1 -====== -:release-date: 2018-09-13 6:27 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- Fixes several bugs related to unwrapping ``Optional[List[..]]`` - in :func:`mode.utils.objects.annotations`. - - This functionality is not really related to mode at all, so - should be moved out of this library. Faust uses it for models. - -.. _version-1.17.0: - -1.17.0 -====== -:release-date: 2018-09-12 5:39 P.M PDT -:release-by: Ask Solem (:github_user:`ask`) - -- New async iterator utility: :class:`~mode.utils.aiter.arange` - - Like :class:`range` but returns an async iterator:: - - async for n in arange(0, 10, 2): - ... - -- New async iterator utility: :func:`~mode.utils.aiter.aslice` - - Like :class:`itertools.islice` but works on asynchronous iterators. - -- New async iterator utility: :func:`~mode.utils.aiter.chunks` - - :class:`~mode.utils.aiter.chunks` takes an async iterable and divides - it up into chunks of size n:: - - # Split range of 100 numbers into chunks of 10 each. - async for chunk in chunks(arange(100), 10): - yield chunk - - This gives chunks like this:: - - [ - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] - [10, 11, 12, 13, 14, 15, 16, 17, 18, 19], - ..., - ] - -.. _version-1.16.0: - -1.16.0 -====== -:release-date: 2018-09-11 1:37 P.M PDT -:release-by: Ask Solem - -- **Distribution**: Installing mode no longer installs the ``t`` directory - containing tests as a Python package. - - Contributed by Michael Seifert - -- **Testing**: New :class:`~mode.utils.mocks.AsyncContextManagerMock` - - You can use this to mock asynchronous context managers. - - Please see :class:`~mode.utils.mocks.AsyncContextManagerMock` for - an example. - -- **CI**: Python 3.7.0 and 3.6.0 was added to the build matrix. - -.. 
_version-1.15.1: - -1.15.1 -====== -:release-date: 2018-08-15 11:17 A.M PDT -:release-by: Ask Solem - -- Tests now passing on CPython 3.7.0 - -- **Utils**: Adds ``remove_optional`` function in :mod:`mode.utils.objects` - - This can be used to extract the concrete type from ``Optional[Foo]``. - -- **Utils**: Adds ``humanize_seconds`` function to :mod:`mode.utils.times` - -.. _version-1.15.0: - -1.15.0 -====== -:release-date: 2018-06-27 1:39 P.M PDT -:release-by: Ask Solem - -- Worker: Logging can now be set up using dictionary config, by passing - the ``logging_config`` argument to :class:`mode.Worker`. - - Contributed by Allison Wang. - -- Worker: No longer supports the ``logformat`` argument. - - To set up custom log format you must now pass in dict configuration - via the ``logging_config`` argument. - -- Service: ``start()`` accidentally silenced :exc:`asyncio.CancelledError`. - -- Service: Invalid assert caused :class:`~mode.CrashingSupervisor` to - crash with strange error - -.. _version-1.14.1: - -1.14.1 -====== -:release-date: 2018-06-06 1:26 P.M PDT -:release-by: Ask Solem - -- Service: Fixed "coroutine x was never awaited" for background - tasks (``@Service.task`` decorator) when service is started and stopped - in quick succession. - -.. _version-1.14.0: - -1.14.0 -====== -:release-date: 2018-06-05 12:13 P.M PDT -:release-by: Ask Solem - -- Adds method ``Service.wait_many(futures, *, timeout=None)`` - -.. _version-1.13.0: - -1.13.0 -====== -:release-date: 2018-05-16 1:26 P.M PDT -:release-by: Ask Solem - -- Mode now registers as a library having static type annotations. - - This conforms to :pep:`561` -- a new specification that defines - how Python libraries register type stubs to make them available - for use with static analyzers like :pypi:`mypy` and :pypi:`pyre-check`. - -- The code base now passes ``--strict-optional`` type checks. - -.. 
_version-1.12.5: - -1.12.5 -====== -:release-date: 2018-05-14 4:48 P.M PDT -:release-by: Ask Solem - -- Supervisor: Fixed wrong index calculation in management - of index-based service restart. - -.. _version-1.12.4: - -1.12.4 -====== -:release-date: 2018-05-07 3:20 P.M PDT -:release-by: Ask Solem - -- Adds new mock class for async functions: :func:`mode.utils.mocks.AsyncMock` - - This can be used to mock an async callable:: - - from mode.utils.mocks import AsyncMock - - class App(Service): - - async def on_start(self): - self.ret = await self.some_async_method('arg') - - async def some_async_method(self, arg): - await asyncio.sleep(1) - - @pytest.fixture - def app(): - return App() - - @pytest.mark.asyncio - async def test_something(*, app): - app.some_async_method = AsyncMock() - async with app: # starts and stops the service, calling on_start - app.some_async_method.assert_called_once_with('arg') - assert app.ret is app.some_async_method.coro.return_value - -- Added 100% test coverage for modules: - - + :mod:`mode.proxy` - + :mod:`mode.threads` - + :mod:`mode.utils.aiter` - -.. _version-1.12.3: - -1.12.3 -====== -:release-date: 2018-05-07 3:33 P.M PDT -:release-by: Ask Solem - -Important Notes ---------------- - -- Moved to https://github.com/ask/mode - -Changes -------- - -- Signal: Improved repr when signal has a default sender. - -- DictAttribute: Now supports ``len`` and ``del(d[key])``. - -- Worker: If overriding ``on_first_start`` you can now call - ``default_on_first_start`` instead of super. - - Example:: - - class MyWorker(Worker): - - async def on_first_start(self) -> None: - print('FIRST START') - await self.default_on_first_start() - -.. _version-1.12.2: - -1.12.2 -====== -:release-date: 2018-04-26 11:47 P.M PDT -:release-by: Ask Solem - -- Fixed shutdown error in :class:`~mode.threads.ServiceThread`. - -.. _version-1.12.1: - -1.12.1 -====== -:release-date: 2018-04-24 11:28 P.M PDT -:release-by: Ask Solem - -- Now works with CPython 3.6.1 and 3.6.0. 
- -.. _version-1.12.0: - -1.12.0 -====== -:release-date: 2018-04-23 1:28 P.M PDT -:release-by: Ask Solem - -Backward Incompatible Changes ------------------------------ - -+ Changed ``Service.add_context`` - - - To add an async context manager (:class:`~typing.AsyncContextManager`), - use :meth:`~mode.Service.add_async_context`:: - - class S(Service): - - async def on_start(self) -> None: - self.context = await self.add_async_context(MyAsyncContext()) - - - To add a regular context manager (:class:`~typing.ContextManager`), - use :meth:`~mode.Service.add_context`:: - - class S(Service): - - async def on_start(self) -> None: - self.context = self.add_context(MyContext()) - - This change was made so that contexts can be added from non-async - functions. To add an *async context* you still need to be within an - async function definition. - -News ----- - -+ Worker: Now redirects :data:`sys.stdout` and :data:`sys.stderr` to the - logging subsystem by default. - - - To disable this pass ``Worker(redirect_stdouts=False)``. - - - The default severity level for ``print`` statements are - :data:`logging.WARN`, but you can change this using - ``Worker(redirect_stdouts_level='INFO')``. - -+ :class:`~mode.Seconds`/:func:`~mode.want_seconds` can now be expressed - as strings and rate strings: - - - float as string: ``want_seconds('1.203') == 1.203`` - - - *10 in one second*: ``want_seconds('10/s') == 10.0`` - - - *10.33 in one hour*: ``want_seconds('10.3/h') == 0.0028611111111111116`` - - - *100 in one hour*: ``want_seconds('100/h') == 0.02777777777777778`` - - - *100 in one day*: ``want_seconds('100/d') == 0.0011574074074074076`` - - This is especially useful for the rate argument - to the :class:`~mode.utils.times.rate_limit` helper. - -+ Added new context manager: :func:`mode.utils.logging.redirect_stdouts`. 
- -+ Module :mod:`mode.types` now organized by category: - - - Service types: :mod:`mode.types.services` - - - Signal types: :mod:`mode.types.signals` - - - Supervisor types: :mod:`mode.types.supervisors` - -+ :class:`mode.flight_recorder` can now wrap objects so that every method call - on that object will result in the call and arguments to that call - being logged. - - Example logging statements with ``INFO`` severity:: - - with flight_recorder(logger, timeout=10.0) as on_timeout: - redis = on_timeout.wrap_info(self.redis) - await redis.get(key) - - There's also ``wrap_debug(o)``, ``wrap_warn(o)``, ``wrap_error(o)``, - and for any severity: ``wrap(logging.CRIT, o)``. - -Fixes ------ - -+ Fixed bug in ``Service.wait`` on Python 3.7. - -.. _version-1.11.5: - -1.11.5 -====== -:release-date: 2018-04-19 3:12 P.M PST -:release-by: Ask Solem - -+ FlowControlQueue now available in ``mode.utils.queues``. - - This is a backward compatible change. - -+ Tests for FlowControlQueue - -.. _version-1.11.4: - -1.11.4 -====== -:release-date: 2018-04-19 9:36 A.M PST -:release-by: Ask Solem - -+ Adds :class:`mode.flight_recorder` - - This is a logging utility to log stuff only when something - times out. - - For example if you have a background thread that is sometimes - hanging:: - - class RedisCache(mode.Service): - - @mode.timer(1.0) - def _background_refresh(self) -> None: - self._users = await self.redis_client.get(USER_KEY) - self._posts = await self.redis_client.get(POSTS_KEY) - - You want to figure out on what line this is hanging, but logging - all the time will provide way too much output, and will even change - how fast the program runs and that can mask race conditions, so that - they never happen. - - Use the flight recorder to save the logs and only log when it times out: - - .. 
sourcecode:: python - - logger = mode.get_logger(__name__) - - class RedisCache(mode.Service): - - @mode.timer(1.0) - def _background_refresh(self) -> None: - with mode.flight_recorder(logger, timeout=10.0) as on_timeout: - on_timeout.info(f'+redis_client.get({USER_KEY!r})') - await self.redis_client.get(USER_KEY) - on_timeout.info(f'-redis_client.get({USER_KEY!r})') - - on_timeout.info(f'+redis_client.get({POSTS_KEY!r})') - await self.redis_client.get(POSTS_KEY) - on_timeout.info(f'-redis_client.get({POSTS_KEY!r})') - - If the body of this :keyword:`with` statement completes before the - timeout, the logs are forgotten about and never emitted -- if it - takes more than ten seconds to complete, we will see these messages - in the log: - - .. sourcecode:: text - - [2018-04-19 09:43:55,877: WARNING]: Warning: Task timed out! - [2018-04-19 09:43:55,878: WARNING]: Please make sure it is hanging before restarting. - [2018-04-19 09:43:55,878: INFO]: [Flight Recorder-1] (started at Thu Apr 19 09:43:45 2018) Replaying logs... - [2018-04-19 09:43:55,878: INFO]: [Flight Recorder-1] (Thu Apr 19 09:43:45 2018) +redis_client.get('user') - [2018-04-19 09:43:55,878: INFO]: [Flight Recorder-1] (Thu Apr 19 09:43:49 2018) -redis_client.get('user') - [2018-04-19 09:43:55,878: INFO]: [Flight Recorder-1] (Thu Apr 19 09:43:46 2018) +redis_client.get('posts') - [2018-04-19 09:43:55,878: INFO]: [Flight Recorder-1] -End of log- - - Now we know this ``redis_client.get`` call can take too long to complete, - and should consider adding a timeout to it. - -.. _version-1.11.3: - -1.11.3 -====== -:release-date: 2018-04-18 5:22 P.M PST -:relese-by: Ask Solem - -- Cry handler (`kill -USR1`): Truncate huge data in stack frames. - -- ServiceProxy: Now supports ``_crash`` method. - -.. 
_version-1.11.2: - -1.11.2 -====== -:release-date: 2018-04-18 5:02 P.M PST -:release-by: Ask Solem - -- Service: ``add_future()`` now maintains futures in a set and futures - are automatically removed from it when done. - -- Cry handler (`kill -USR1`) now shows name of Service.task background tasks. - -- Stampede: Now propagates cancellation. - -.. _version-1.11.1: - -1.11.1 -====== -:release-date: 2018-04-18 11:08 P.M PST -:release-by: Ask Solem - -- Service.add_context: Now works with AsyncContextManager. - -- CI now runs functional tests. - -- Added supervisor and service tests. - -.. _version-1.11.0: - -1.11.0 -====== -:release-date: 2018-04-17 1:23 P.M PST -:release-by: Ask Solem - -- Supervisor: Fixes bug with max restart triggering too early. - -- Supervisor: Also restart child services. - -- Service: Now supports ``__post_init__`` like Python 3.7 dataclasses. - -- Service: Crash is logged even if crashed multiple times. - -.. _version-1.10.4: - -1.10.4 -====== -:release-date: 2018-04-13 3:53 P.M PST -:release-by: Ask Solem - -- Supervisor: Log full traceback when restarting service. - -.. _version-1.10.3: - -1.10.3 -====== -:release-date: 2018-04-11 10:58 P.M PST -:release-by: Ask Solem - -- setup_logging: now ensure logging is setup by clearing root logger handlers. - -.. _version-1.10.2: - -1.10.2 -====== -:release-date: 2018-04-03 4:50 P.M PST -:release-by: Ask Solem - -- Fixed wrong version number in Changelog. - -.. _version-1.10.1: - -1.10.1 -======= -:release-date: 2018-04-03 4:43 P.M PST -:release-by: Ask Solem - -- Service.wait: If the future we are waiting for is cancelled we must - propagate :exc:`CancelledError`. - -.. _version-1.10.0: - -1.10.0 -====== -:release-date: 2018-03-30 12:36 P.M PST -:release-by: Ask Solem - -- New supervisor: :class:`~mode.supervisors.ForfeitOneForOneSupervisor`. - - If a service in the group crashes we give up on that service - and don't start it again. 
- -- New supervisor: :class:`~mode.supervisor.ForfeitOneForAllSupervisor`. - - If a service in the group crashes we give up on it, but also - stop all services in the group and give up on them also. - -- Service Logging: Renamed ``self.log.crit`` to ``self.log.critical``. - - The old name is still available and is not deprecated at this time. - - -.. _version-1.9.2: - -1.9.2 -===== -:release-date: 2018-03-20 10:17 P.M PST -:release-by: Ask Solem - -- Adds ``FlowControlEvent.clear()`` to clear all contents of flow - controlled queues. - -- :class:`~mode.utils.futures.FlowControlEvent` now starts in a suspended - state. - - To disable this pass ``FlowControlEvent(initially_suspended=False))``. - -- Adds ``Service.service_reset`` method to reset service - start/stopped/crashed/etc., flags - -.. _version-1.9.1: - -1.9.1 -===== -:release-date: 2018-03-05 11:51 P.M PST -:release-by: Ask Solem - -- No longer depends on :pypi:`terminaltables`. - -.. _version-1.9.0: - -1.9.0 -===== -:release-date: 2018-03-05 11:33 P.M PST -:release-by: Ask Solem - -Backward Incompatible Changes -============================= - -- Module ``mode.utils.debug`` renamed to :mod:`mode.debug`. - - This is unlikely to affect users as this module is only used by mode - internally. - - This module had to move because it imports ``mode.Service``, and - the :mod:`mode.utils` package is not allowed to import from the - :mod:`mode` package at all. - -News -==== - -- Added function :func:`mode.utils.import.smart_import`. - -- Added non-async version of :class:`mode.Signal`: :class:`mode.SyncSignal`. - - The signal works exactly the same as the asynchronous version, except - ``Signal.send`` must not be :keyword:`await`-ed:: - - on_configured = SyncSignal() - on_configured.send(sender=obj) - - -- Added method ``iterate`` to :class:`mode.utils.imports.FactoryMapping`. - - This enables you to iterate over the extensions added to a - :mod:`setuptools` entrypoint. 
- -Fixes -===== - -- ``StampedeWrapper`` now correctly clears flag when original call done. - -.. _version-1.8.0: - -1.8.0 -===== -:release-date: 2018-02-20 04:01 P.M PST -:release-by: Ask Solem - -Backward Incompatible Changes ------------------------------ - -- API Change to fix memory leak in ``Service.wait``. - - The ``Service.wait(*futures)`` method was added to be able to wait for - this list of futures but also stop waiting if the service is stopped or - crashed:: - - import asyncio - from mode import Service - - class X(Service): - on_thing_ready: asyncio.Event - - def __post_init__(self): - self.on_thing_ready = asyncio.Event(loop=loop) - - @Service.task - async def _my_background_task(self): - while not self.should_stop: - # wait for flag to be set (or service stopped/crashed) - await self.wait(self.on_thing_ready.wait()) - print('FLAG SET') - - The problem with this was - - 1) The wait flag would return None and not raise an exception if the - service is stopped/crashed. - 2) Futures would be scheduled on the event loop but not properly cleaned - up, creating a very slow memory leak. - 3) No return value was returned for succesful feature. - - So to properly implement this we had to change the API of the ``wait`` - method to return a tuple instead, and to only allow a single coroutine to - be passed to wait:: - - @Service.task - async def _my_background_task(self): - while not self.should_stop: - # wait for flag to be set (or service stopped/crashed) - result, stopped = await self.wait(self.on_thing_ready) - if not stopped: - print('FLAG SET') - - This way the user can provide an alternate path when the service is - stopped/crashed while waiting for this event. - - A new shortcut method ``wait_for_stopped(fut)`` was also added:: - - # wait for flag to be set (or service stopped/crashed) - if not await self.wait_for_stopped(self.on_thing_ready): - print('FLAG SET') - - Moreover, you can now pass :class:`asyncio.Event` objects directly to - ``wait()``. 
- -News ----- - -- Added :class:`mode.utils.collections.DictAttribute`. - -- Added :class:`mode.utils.collections.AttributeDict`. - -Bugs ----- - -- Signals can create clone of signal with default sender already set - - .. code-block:: python - - signal: Signal[int] = Signal() - signal = signal.with_default_sender(obj) - -.. _version-1.7.0: - -1.7.0 -===== -:release-date: 2018-02-05 12:28 P.M PST -:release-by: Ask Solem - -- Adds :mod:`mode.utils.aiter` for missing ``aiter`` and ``anext`` functions. - -- Adds :mod:`mode.utils.futures` for :class:`asyncio.Task` related tools. - -- Adds :mod:`mode.utils.collections` for custom mapping/set and list - data structures. - -- Adds :mod:`mode.utils.imports` for importing modules at runtime, - as well as utilities for typed :mod:`setuptools` entry-points. - -- Adds :mod:`mode.utils.text` for fuzzy matching user input. - -.. _version-1.6.0: - -1.6.0 -===== -:release-date: 2018-02-05 11:10 P.M PST -:release-by: Ask Solem - -- Fixed bug where ``@Service.task`` background tasks were not started - in subclasses. - -- Service: Now has two exit stacks: ``.exit_stack`` & ``.async_exit_stack``. - - This is a backward incompatible change, but probably nobody was accessing - ``.exit_stack`` directly. - - Use ``await Service.enter_context(ctx)`` with both regular and - asynchronous context managers:: - - class X(Service): - - async def on_start(self) -> None: - # works with both context manager types. - await self.enter_context(async_context) - await self.enter_context(context) - -- Adds :func:`~mode.utils.contextlib.asynccontextmanager`` decorator - from CPython 3.7b1. - - This decorator works exactly the same as - :func:`contextlib.contextmanager`, but for :keyword:`async with`. 
- - Import it from :mod:`mode.utils.contexts`:: - - from mode.utils.contexts import asynccontextmanager - - @asynccontextmanager - async def connection_or_default(conn: Connection = None) -> Connection: - if connection is None: - async with connection_pool.acquire(): - yield - else: - yield connection - - async def main(): - async with connection_or_default() as connection: - ... - -- Adds :class:`~mode.utils.contexts.AsyncExitStack` from CPython 3.7b1 - - This works like :class:`contextlib.ExitStack`, but for asynchronous - context managers used with :keyword:`async with`. - -- Logging: Worker debug log messages are now colored blue when colors are - enabled. - - -.. _version-1.5.0: - -1.5.0 -===== -:release-date: 2018-01-04 03:43 P.M PST -:release-by: Ask Solem - -- Service: Adds new ``await self.add_context(context)`` - - This adds a new context manager to be entered when the service starts, - and exited once the service exits. - - The context manager can be either a :class:`typing.AsyncContextManager` - (:keyword:`async with`) or a - regular :class:`typing.ContextManager` (:keyword:`with`). - -- Service: Added ``await self.add_runtime_dependency()`` which unlike - ``add_dependency`` starts the dependent service if the self is already - started. - -- Worker: Now supports a new ``console_port`` argument to specify a port - for the :pypi:`aiomonitor` console, different than the default (50101). - - .. note:: - - The aiomonitor console is only started when ``Worker(debug=True, ...)`` - is set. - -.. _version-1.4.0: - -1.4.0 -===== -:release-date: 2017-12-21 09:50 A.M PST -:release-by: Ask Solem - -- Worker: Add support for parameterized logging handlers. - - Contributed by Prithvi Narasimhan. - -.. _version-1.3.0: - -1.3.0 -===== -:release-date: 2017-12-04 01:17 P.M PST -:release-by: Ask Solem - -- Now supports color output in logs when logging to a terminal. - -- Now depends on :pypi:`colorlog` - -- Added :class:`mode.Signal`: async. 
implementation of the observer - pattern (think Django signals). - -- DependencyGraph is now a generic type: ``DependencyGraph[int]`` - -- Node is now a generic type: ``Node[Service]``. - -.. _version-1.2.1: - -1.2.1 -===== -:release-date: 2017-11-06 04:50 P.M PST -:release-by: Ask Solem - -- Service: Subclasses can now override a Service.task method. - - Previously it would unexpectedly start two tasks: - the task defined in the superclass and the task defined in - the subclass. - -.. _version-1.2.0: - -1.2.0 -===== -:release-date: 2017-11-02 03:17 P.M PDT -:release-by: Ask Solem - -- Renames PoisonpillSupervisor to CrashingSupervisor. - -- Child services now stopped even if not fully started. - - Previously ``child_service.stop()`` would not be called - if `child_service.start()` never completed, but as a service - might be in the process of starting other child services, we need - to call stop even if not fully started. - -.. _version-1.1.1: - -1.1.1 -===== -:release-date: 2017-10-25 04:34 P.M PDT -:release-by: Ask Solem - -- Added alternative event loop implementations: eventlet, gevent, uvloop. - - E.g. to use gevent as the event loop, install mode using: - - .. sourcecode:: console - - $ pip install mode[gevent] - - and add this line to the top of your worker entrypoint module:: - - import mode.loop - mode.loop.use('gevent') - -- Service: More fixes for the weird `__init_subclass__` behavior - only seen in Python 3.6.3. - -- ServiceThread: Now propagates errors raised in the thread - to the main thread. - -.. _version-1.1.0: - -1.1.0 -===== -:release-date: 2017-10-19 01:35 P.M PDT -:release-by: Ask Solem - -- ServiceThread: Now inherits from Service, and uses - ``loop.run_in_executor()`` to start the service as a thread. - -- setup_logging: filename argument is now respected. - -.. 
_version-1.0.2: - -1.0.2 -===== -:release-date: 2017-10-10 01:51 P.M PDT -:release-by: Ask Solem - -- Adds support for Python 3.6.0 - -- Adds backports of typing improvements in CPython 3.6.1 - to ``mode.utils.compat``: ``AsyncContextManager``, ``ChainMap``, - ``Counter``, and ``Deque``. - -- ``Supervisor.add`` and ``.discard`` now takes an arbitrary number - of services to add/discard as star arguments. - -- Fixed typo in example: ``Service.task`` -> ``mode.Service.task``. - - Contributed by Xu Jing. - -.. _version-1.0.1: - -1.0.1 +0.1.0 ===== -:release-date: 2017-10-05 02:53 P.M PDT -:release-by: Ask Solem +:release-date: 2020-12-17 14:00 CET +:release-by: Thomas Sarboni (:github_user:`max-k`) -- Fixes compatibility with Python 3.6.3. +- Friendly fork of ask/mode: Initial release - Python 3.6.3 badly broke ``__init_subclass__``, in such a way that - any class attribute set is set for all subclasses. +- Move to new travis-ci.com domain -.. _version-1.0.0: +- Add tests on Python 3.8.1-3.8.6 -1.0.0 -===== -:release-date: 2017-10-04 01:29 P.M PDT -:release-by: Ask Solem +- Fix broken tests -- Initial release +- Add Python 3.9 support diff --git a/README.rst b/README.rst index 57645c29..f5339c81 100644 --- a/README.rst +++ b/README.rst @@ -1,15 +1,30 @@ ===================================================================== - Mode: AsyncIO Services + AsyncIO Services Fork ===================================================================== |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| -:Version: 4.3.2 -:Web: http://mode.readthedocs.org/ -:Download: http://pypi.org/project/mode -:Source: http://github.com/ask/mode +:Version: 0.2.0 +:Web: http://mode-streaming.readthedocs.org/ +:Download: http://pypi.org/project/mode-streaming +:Source: http://github.com/faust-streaming/mode :Keywords: async, service, framework, actors, bootsteps, graph + +Why the fork +============ + +We have decided to fork the original *Mode* project because there is a critical
process of releasing new versions which causes uncertainty in the community. Everybody is welcome to contribute to this *fork*, and you can be added as a maintainer. + +We want to: + +- Ensure continuous releases +- Code quality +- Support latest Python versions +- Update the documentation + +and more... + What is Mode? ============= @@ -239,7 +254,7 @@ or from source. To install using `pip`:: - $ pip install -U mode + $ pip install -U mode-streaming .. _installing-from-source: @@ -247,12 +262,12 @@ Downloading and installing from source -------------------------------------- Download the latest version of Mode from -http://pypi.org/project/mode +http://pypi.org/project/mode-streaming You can install it by doing the following:: - $ tar xvfz mode-0.0.0.tar.gz - $ cd mode-0.0.0 + $ tar xvfz mode-streaming-0.2.0.tar.gz + $ cd mode-streaming-0.2.0 $ python setup.py build # python setup.py install @@ -270,7 +285,7 @@ With pip You can install the latest snapshot of Mode using the following pip command:: - $ pip install https://github.com/ask/mode/zipball/master#egg=mode + $ pip install https://github.com/faust-streaming/mode/zipball/master#egg=mode-streaming FAQ === @@ -290,7 +305,7 @@ and you can install this as a bundle with Mode: .. sourcecode:: console - $ pip install -U mode[gevent] + $ pip install -U mode-streaming[gevent] Then to actually use gevent as the event loop you have to execute the following in your entrypoint module (usually where you @@ -315,7 +330,7 @@ and you can install this as a bundle with Mode: .. sourcecode:: console - $ pip install -U mode[eventlet] + $ pip install -U mode-streaming[eventlet] Then to actually use eventlet as the event loop you have to execute the following in your entrypoint module (usually where you @@ -447,26 +462,25 @@ reported by opening an issue or contacting one or more of the project maintainer This Code of Conduct is adapted from the Contributor Covenant, version 1.2.0 available at http://contributor-covenant.org/version/1/2/0/. -..
|build-status| image:: https://secure.travis-ci.org/ask/mode.png?branch=master +.. |build-status| image:: https://travis-ci.com/faust-streaming/mode.png?branch=master :alt: Build status - :target: https://travis-ci.org/ask/mode + :target: https://travis-ci.com/faust-streaming/mode -.. |coverage| image:: https://codecov.io/github/ask/mode/coverage.svg?branch=master - :target: https://codecov.io/github/ask/mode?branch=master +.. |coverage| image:: https://codecov.io/github/faust-streaming/mode/coverage.svg?branch=master + :target: https://codecov.io/github/faust-streaming/mode?branch=master -.. |license| image:: https://img.shields.io/pypi/l/mode.svg +.. |license| image:: https://img.shields.io/pypi/l/mode-streaming.svg :alt: BSD License :target: https://opensource.org/licenses/BSD-3-Clause -.. |wheel| image:: https://img.shields.io/pypi/wheel/mode.svg +.. |wheel| image:: https://img.shields.io/pypi/wheel/mode-streaming.svg :alt: Mode can be installed via wheel - :target: http://pypi.org/project/mode/ + :target: http://pypi.org/project/mode-streaming/ -.. |pyversion| image:: https://img.shields.io/pypi/pyversions/mode.svg +.. |pyversion| image:: https://img.shields.io/pypi/pyversions/mode-streaming.svg :alt: Supported Python versions. - :target: http://pypi.org/project/mode/ + :target: http://pypi.org/project/mode-streaming/ -.. |pyimp| image:: https://img.shields.io/pypi/implementation/mode.svg +.. |pyimp| image:: https://img.shields.io/pypi/implementation/mode-streaming.svg :alt: Supported Python implementations. 
- :target: http://pypi.org/project/mode/ - + :target: http://pypi.org/project/mode-streaming/ diff --git a/docs/conf.py b/docs/conf.py index 5bec41db..a7368825 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,58 +1,61 @@ # -*- coding: utf-8 -*- import sys from contextlib import suppress + from sphinx_celery import conf -sys.path.append('.') +sys.path.append(".") extensions = [] -globals().update(conf.build_config( - 'mode', __file__, - project='Mode', - # version_dev='2.0', - # version_stable='1.4', - canonical_url='http://mode.readthedocs.io', - webdomain='', - github_project='ask/mode', - copyright='2017-2020', - html_logo='images/logo.png', - html_favicon='images/favicon.ico', - html_prepend_sidebars=[], - include_intersphinx={'python', 'sphinx'}, - extra_extensions=[ - 'sphinx.ext.napoleon', - 'sphinx_autodoc_annotation', - 'alabaster', - ], - extra_intersphinx_mapping={ - }, - # django_settings='testproj.settings', - # from pathlib import Path - # path_additions=[Path.cwd().parent / 'testproj'] - apicheck_ignore_modules=[ - 'mode.loop.eventlet', - 'mode.loop.gevent', - 'mode.loop.uvloop', - 'mode.loop._gevent_loop', - 'mode.utils', - 'mode.utils._py37_contextlib', - 'mode.utils.graphs.formatter', - 'mode.utils.graphs.graph', - 'mode.utils.types', - ], -)) - -html_theme = 'alabaster' +globals().update( + conf.build_config( + "mode", + __file__, + project="Mode", + # version_dev='2.0', + # version_stable='1.4', + canonical_url="http://mode-streaming.readthedocs.io", + webdomain="", + github_project="faust-streaming/mode", + copyright="2017-2020", + html_logo="images/logo.png", + html_favicon="images/favicon.ico", + html_prepend_sidebars=[], + include_intersphinx={"python", "sphinx"}, + extra_extensions=[ + "sphinx.ext.napoleon", + "sphinx_autodoc_annotation", + "alabaster", + ], + extra_intersphinx_mapping={}, + # django_settings='testproj.settings', + # from pathlib import Path + # path_additions=[Path.cwd().parent / 'testproj'] + apicheck_ignore_modules=[ + 
"mode.loop.eventlet", + "mode.loop.gevent", + "mode.loop.uvloop", + "mode.loop._gevent_loop", + "mode.utils", + "mode.utils._py37_contextlib", + "mode.utils.graphs.formatter", + "mode.utils.graphs.graph", + "mode.utils.types", + ], + ) +) + +html_theme = "alabaster" html_sidebars = {} -templates_path = ['_templates'] +templates_path = ["_templates"] -autodoc_member_order = 'bysource' +autodoc_member_order = "bysource" -pygments_style = 'sphinx' +pygments_style = "sphinx" # This option is deprecated and raises an error. with suppress(NameError): - del(html_use_smartypants) # noqa + del html_use_smartypants # noqa -extensions.remove('sphinx.ext.viewcode') +extensions.remove("sphinx.ext.viewcode") diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt index 01080ba8..f53d9414 100644 --- a/docs/includes/installation.txt +++ b/docs/includes/installation.txt @@ -8,7 +8,7 @@ or from source. To install using `pip`:: - $ pip install -U mode + $ pip install -U mode-streaming .. 
_installing-from-source: @@ -16,12 +16,12 @@ Downloading and installing from source -------------------------------------- Download the latest version of Mode from -http://pypi.org/project/mode +http://pypi.org/project/mode-streaming You can install it by doing the following:: - $ tar xvfz mode-0.0.0.tar.gz - $ cd mode-0.0.0 + $ tar xvfz mode-streaming-0.2.0.tar.gz + $ cd mode-streaming-0.2.0 $ python setup.py build # python setup.py install @@ -39,4 +39,4 @@ With pip You can install the latest snapshot of Mode using the following pip command:: - $ pip install https://github.com/ask/mode/zipball/master#egg=mode + $ pip install https://github.com/faust-streaming/mode/zipball/master#egg=mode-streaming diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 9b066abe..91676ed7 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,7 +1,7 @@ -:Version: 4.3.2 -:Web: http://mode.readthedocs.org/ -:Download: http://pypi.org/project/mode -:Source: http://github.com/ask/mode +:Version: 0.2.0 +:Web: http://mode-streaming.readthedocs.org/ +:Download: http://pypi.org/project/mode-streaming +:Source: http://github.com/faust-streaming/mode :Keywords: async, service, framework, actors, bootsteps, graph What is Mode? diff --git a/docs/index.rst b/docs/index.rst index ded15b38..156eb339 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -30,4 +30,3 @@ Indices and tables * :ref:`genindex` * :ref:`modindex` * :ref:`search` - diff --git a/docs/reference/mode.loop.eventlet.rst b/docs/reference/mode.loop.eventlet.rst index 5c8eaccc..e2cac7f0 100644 --- a/docs/reference/mode.loop.eventlet.rst +++ b/docs/reference/mode.loop.eventlet.rst @@ -10,4 +10,3 @@ Importing this module directly will set the global event loop. See :mod:`faust.loop` for more information.
- diff --git a/docs/reference/mode.loop.gevent.rst b/docs/reference/mode.loop.gevent.rst index d86d01ad..6f50695f 100644 --- a/docs/reference/mode.loop.gevent.rst +++ b/docs/reference/mode.loop.gevent.rst @@ -10,4 +10,3 @@ Importing this module directly will set the global event loop. See :mod:`faust.loop` for more information. - diff --git a/docs/reference/mode.loop.uvloop.rst b/docs/reference/mode.loop.uvloop.rst index 181a1ede..72fe42a5 100644 --- a/docs/reference/mode.loop.uvloop.rst +++ b/docs/reference/mode.loop.uvloop.rst @@ -10,4 +10,3 @@ Importing this module directly will set the global event loop. See :mod:`faust.loop` for more information. - diff --git a/docs/reference/mode.utils.cron.rst b/docs/reference/mode.utils.cron.rst new file mode 100644 index 00000000..4f02440c --- /dev/null +++ b/docs/reference/mode.utils.cron.rst @@ -0,0 +1,11 @@ +===================================================== + ``mode.utils.cron`` +===================================================== + +.. contents:: + :local: +.. currentmodule:: mode.utils.cron + +.. automodule:: mode.utils.cron + :members: + :undoc-members: diff --git a/docs/templates/readme.txt b/docs/templates/readme.txt index 3835d7ee..7e23aaf3 100644 --- a/docs/templates/readme.txt +++ b/docs/templates/readme.txt @@ -12,25 +12,25 @@ .. include:: ../includes/code-of-conduct.txt -.. |build-status| image:: https://secure.travis-ci.org/ask/mode.png?branch=master +.. |build-status| image:: https://travis-ci.com/faust-streaming/mode.png?branch=master :alt: Build status - :target: https://travis-ci.org/ask/mode + :target: https://travis-ci.com/faust-streaming/mode -.. |coverage| image:: https://codecov.io/github/ask/mode/coverage.svg?branch=master - :target: https://codecov.io/github/ask/mode?branch=master +.. |coverage| image:: https://codecov.io/github/faust-streaming/mode/coverage.svg?branch=master + :target: https://codecov.io/github/faust-streaming/mode?branch=master -.. 
|license| image:: https://img.shields.io/pypi/l/mode.svg +.. |license| image:: https://img.shields.io/pypi/l/mode-streaming.svg :alt: BSD License :target: https://opensource.org/licenses/BSD-3-Clause -.. |wheel| image:: https://img.shields.io/pypi/wheel/mode.svg +.. |wheel| image:: https://img.shields.io/pypi/wheel/mode-streaming.svg :alt: Mode can be installed via wheel - :target: http://pypi.org/project/mode/ + :target: http://pypi.org/project/mode-streaming/ -.. |pyversion| image:: https://img.shields.io/pypi/pyversions/mode.svg +.. |pyversion| image:: https://img.shields.io/pypi/pyversions/mode-streaming.svg :alt: Supported Python versions. - :target: http://pypi.org/project/mode/ + :target: http://pypi.org/project/mode-streaming/ -.. |pyimp| image:: https://img.shields.io/pypi/implementation/mode.svg +.. |pyimp| image:: https://img.shields.io/pypi/implementation/mode-streaming.svg :alt: Supported Python implementations. - :target: http://pypi.org/project/mode/ + :target: http://pypi.org/project/mode-streaming/ diff --git a/examples/service.py b/examples/service.py index 542ff05a..1539a18e 100644 --- a/examples/service.py +++ b/examples/service.py @@ -2,22 +2,21 @@ class MyService(mode.Service): - async def on_started(self) -> None: - self.log.info('Service started (hit ctrl+C to exit).') + self.log.info("Service started (hit ctrl+C to exit).") @mode.Service.task async def _background_task(self) -> None: - print('BACKGROUND TASK STARTING') + print("BACKGROUND TASK STARTING") while not self.should_stop: await self.sleep(1.0) - print('BACKGROUND SERVICE WAKING UP') + print("BACKGROUND SERVICE WAKING UP") -if __name__ == '__main__': +if __name__ == "__main__": mode.Worker( MyService(), - loglevel='INFO', + loglevel="INFO", logfile=None, # stderr # when daemon the worker must be explicitly stopped to end. 
daemon=True, diff --git a/examples/tutorial.py b/examples/tutorial.py index 9eb6a65a..d9d4803f 100644 --- a/examples/tutorial.py +++ b/examples/tutorial.py @@ -2,6 +2,7 @@ from typing import Any, List, MutableMapping from aiohttp.web import Application + from mode import Service from mode.threads import ServiceThread from mode.utils.objects import cached_property @@ -12,17 +13,16 @@ class User: def remove_expired_users(d): - print('REMOVING EXPIRED USERS') + print("REMOVING EXPIRED USERS") ... # implement yourself async def run_websocket_server(): - print('STARTING WEBSOCKET SERVER') + print("STARTING WEBSOCKET SERVER") ... # implement yourself class Websockets(Service): - def __init__(self, port: int = 8081, **kwargs: Any) -> None: self.port = 8081 self._server = None @@ -37,11 +37,7 @@ async def on_stop(self) -> None: class Webserver(ServiceThread): - - def __init__(self, - port: int = 8000, - bind: str = None, - **kwargs: Any) -> None: + def __init__(self, port: int = 8000, bind: str = None, **kwargs: Any) -> None: self._app = Application() self.port = port self.bind = bind @@ -53,9 +49,8 @@ async def on_start(self) -> None: handler = self._handler = self._app.make_handler() # self.loop is the event loop in this thread # self.parent_loop is the loop that created this thread. - self._srv = await self.loop.create_server( - handler, self.bind, self.port) - self.log.info('Serving on port %s', self.port) + self._srv = await self.loop.create_server(handler, self.bind, self.port) + self.log.info("Serving on port %s", self.port) async def on_thread_stop(self) -> None: # on_thread_stop() executes in the thread. 
@@ -63,18 +58,18 @@ async def on_thread_stop(self) -> None: # quite a few steps required to stop the aiohttp server: if self._srv is not None: - self.log.info('Closing server') + self.log.info("Closing server") self._srv.close() - self.log.info('Waiting for server to close handle') + self.log.info("Waiting for server to close handle") await self._srv.wait_closed() if self._app is not None: - self.log.info('Shutting down web application') + self.log.info("Shutting down web application") await self._app.shutdown() if self._handler is not None: - self.log.info('Waiting for handler to shut down') + self.log.info("Waiting for handler to shut down") await self._handler.shutdown(60.0) if self._app is not None: - self.log.info('Cleanup') + self.log.info("Cleanup") await self._app.cleanup() @@ -97,12 +92,13 @@ async def _remove_expired(self): class App(Service): - - def __init__(self, - web_port: int = 8000, - web_bind: str = None, - websocket_port: int = 8001, - **kwargs: Any) -> None: + def __init__( + self, + web_port: int = 8000, + web_bind: str = None, + websocket_port: int = 8001, + **kwargs: Any + ) -> None: self.web_port = web_port self.web_bind = web_bind self.websocket_port = websocket_port @@ -116,14 +112,16 @@ def on_init_dependencies(self) -> List: ] async def on_start(self) -> None: - import pydot import io + + import pydot + o = io.StringIO() beacon = self.beacon.root or self.beacon beacon.as_graph().to_dot(o) - graph, = pydot.graph_from_dot_data(o.getvalue()) - print('WRITING GRAPH TO image.png') - with open('image.png', 'wb') as fh: + (graph,) = pydot.graph_from_dot_data(o.getvalue()) + print("WRITING GRAPH TO image.png") + with open("image.png", "wb") as fh: fh.write(graph.create_png()) @cached_property @@ -150,6 +148,7 @@ def user_cache(self) -> UserCache: app = App() -if __name__ == '__main__': +if __name__ == "__main__": from mode.worker import Worker - Worker(app, loglevel='info', daemon=True).execute_from_commandline() + + Worker(app, loglevel="info", 
daemon=True).execute_from_commandline() diff --git a/extra/bandit/baseline.json b/extra/bandit/baseline.json index 68afae14..f05fb96b 100644 --- a/extra/bandit/baseline.json +++ b/extra/bandit/baseline.json @@ -537,4 +537,4 @@ "test_name": "try_except_pass" } ] -} \ No newline at end of file +} diff --git a/extra/bandit/config.yml b/extra/bandit/config.yml index ee7ec19c..38fba5ae 100644 --- a/extra/bandit/config.yml +++ b/extra/bandit/config.yml @@ -395,4 +395,3 @@ weak_cryptographic_key: weak_key_size_ec_medium: 224 weak_key_size_rsa_high: 1024 weak_key_size_rsa_medium: 2048 - diff --git a/mode/__init__.py b/mode/__init__.py index 69167874..d647e739 100644 --- a/mode/__init__.py +++ b/mode/__init__.py @@ -6,13 +6,17 @@ import re import sys import typing + +# Lazy loading. +# - See werkzeug/__init__.py for the rationale behind this. +from types import ModuleType # noqa from typing import Any, Mapping, NamedTuple, Sequence -__version__ = '4.3.2' -__author__ = 'Robinhood Markets' -__contact__ = 'opensource@robinhood.com' -__homepage__ = 'https://github.com/ask/mode' -__docformat__ = 'restructuredtext' +__version__ = "0.2.0" +__author__ = "Robinhood Markets" +__contact__ = "opensource@robinhood.com" +__homepage__ = "https://github.com/faust-streaming/mode" +__docformat__ = "restructuredtext" # -eof meta- @@ -27,89 +31,86 @@ class version_info_t(NamedTuple): # bumpversion can only search for {current_version} # so we have to parse the version here. 
-_match = re.match(r'(\d+)\.(\d+).(\d+)(.+)?', __version__) +_match = re.match(r"(\d+)\.(\d+).(\d+)(.+)?", __version__) if _match is None: # pragma: no cover - raise RuntimeError('MODE VERSION HAS ILLEGAL FORMAT') + raise RuntimeError("MODE VERSION HAS ILLEGAL FORMAT") _temp = _match.groups() VERSION = version_info = version_info_t( - int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or '', '') -del(_match) -del(_temp) -del(re) + int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or "", "" +) +del _match +del _temp +del re if sys.version_info <= (3, 7): # pragma: no cover import aiocontextvars # noqa if typing.TYPE_CHECKING: # pragma: no cover - from .services import Service, task, timer # noqa: E402 - from .signals import BaseSignal, Signal, SyncSignal # noqa: E402 - from .supervisors import ( # noqa: E402 + from .services import Service, task, timer # noqa: E402 + from .signals import BaseSignal, Signal, SyncSignal # noqa: E402 + from .supervisors import CrashingSupervisor # noqa: E402 + from .supervisors import ( ForfeitOneForAllSupervisor, ForfeitOneForOneSupervisor, OneForAllSupervisor, OneForOneSupervisor, SupervisorStrategy, - CrashingSupervisor, ) - from .types.services import ServiceT # noqa: E402 + from .types.services import ServiceT # noqa: E402 from .types.signals import BaseSignalT, SignalT, SyncSignalT # noqa: E402 - from .types.supervisors import SupervisorStrategyT # noqa: E402 - from .utils.times import Seconds, want_seconds # noqa: E402 - from .utils.logging import ( # noqa: E402 - flight_recorder, - get_logger, - setup_logging, - ) - from .utils.objects import label, shortlabel # noqa: E402 - from .worker import Worker # noqa: E402 + from .types.supervisors import SupervisorStrategyT # noqa: E402 + from .utils.logging import flight_recorder, get_logger, setup_logging # noqa: E402 + from .utils.objects import label, shortlabel # noqa: E402 + from .utils.times import Seconds, want_seconds # noqa: E402 + from .worker import Worker # noqa: 
E402 __all__ = [ - 'BaseSignal', - 'BaseSignalT', - 'Service', - 'Signal', - 'SignalT', - 'SyncSignal', - 'SyncSignalT', - 'ForfeitOneForAllSupervisor', - 'ForfeitOneForOneSupervisor', - 'OneForAllSupervisor', - 'OneForOneSupervisor', - 'SupervisorStrategy', - 'CrashingSupervisor', - 'ServiceT', 'SupervisorStrategyT', - 'Seconds', 'want_seconds', - 'get_logger', 'setup_logging', - 'label', 'shortlabel', - 'Worker', - 'task', - 'timer', - 'flight_recorder', + "BaseSignal", + "BaseSignalT", + "Service", + "Signal", + "SignalT", + "SyncSignal", + "SyncSignalT", + "ForfeitOneForAllSupervisor", + "ForfeitOneForOneSupervisor", + "OneForAllSupervisor", + "OneForOneSupervisor", + "SupervisorStrategy", + "CrashingSupervisor", + "ServiceT", + "SupervisorStrategyT", + "Seconds", + "want_seconds", + "get_logger", + "setup_logging", + "label", + "shortlabel", + "Worker", + "task", + "timer", + "flight_recorder", ] -# Lazy loading. -# - See werkzeug/__init__.py for the rationale behind this. -from types import ModuleType # noqa - all_by_module: Mapping[str, Sequence[str]] = { - 'mode.services': ['Service', 'task', 'timer'], - 'mode.signals': ['BaseSignal', 'Signal', 'SyncSignal'], - 'mode.supervisors': [ - 'ForfeitOneForAllSupervisor', - 'ForfeitOneForOneSupervisor', - 'OneForAllSupervisor', - 'OneForOneSupervisor', - 'SupervisorStrategy', - 'CrashingSupervisor', + "mode.services": ["Service", "task", "timer"], + "mode.signals": ["BaseSignal", "Signal", "SyncSignal"], + "mode.supervisors": [ + "ForfeitOneForAllSupervisor", + "ForfeitOneForOneSupervisor", + "OneForAllSupervisor", + "OneForOneSupervisor", + "SupervisorStrategy", + "CrashingSupervisor", ], - 'mode.types.services': ['ServiceT'], - 'mode.types.signals': ['BaseSignalT', 'SignalT', 'SyncSignalT'], - 'mode.types.supervisors': ['SupervisorStrategyT'], - 'mode.utils.times': ['Seconds', 'want_seconds'], - 'mode.utils.logging': ['flight_recorder', 'get_logger', 'setup_logging'], - 'mode.utils.objects': ['label', 
'shortlabel'], - 'mode.worker': ['Worker'], + "mode.types.services": ["ServiceT"], + "mode.types.signals": ["BaseSignalT", "SignalT", "SyncSignalT"], + "mode.types.supervisors": ["SupervisorStrategyT"], + "mode.utils.times": ["Seconds", "want_seconds"], + "mode.utils.logging": ["flight_recorder", "get_logger", "setup_logging"], + "mode.utils.objects": ["label", "shortlabel"], + "mode.worker": ["Worker"], } object_origins = {} @@ -123,8 +124,7 @@ class _module(ModuleType): def __getattr__(self, name: str) -> Any: if name in object_origins: - module = __import__( - object_origins[name], None, None, [name]) + module = __import__(object_origins[name], None, None, [name]) for extra_name in all_by_module[module.__name__]: setattr(self, extra_name, getattr(module, extra_name)) return getattr(module, name) @@ -132,11 +132,26 @@ def __getattr__(self, name: str) -> Any: def __dir__(self) -> Sequence[str]: result = list(new_module.__all__) - result.extend(('__file__', '__path__', '__doc__', '__all__', - '__docformat__', '__name__', '__path__', - 'VERSION', 'version_info_t', 'version_info', - '__package__', '__version__', '__author__', - '__contact__', '__homepage__', '__docformat__')) + result.extend( + ( + "__file__", + "__path__", + "__doc__", + "__all__", + "__docformat__", + "__name__", + "__path__", + "VERSION", + "version_info_t", + "version_info", + "__package__", + "__version__", + "__author__", + "__contact__", + "__homepage__", + "__docformat__", + ) + ) return result @@ -144,18 +159,20 @@ def __dir__(self) -> Sequence[str]: old_module = sys.modules[__name__] new_module = sys.modules[__name__] = _module(__name__) -new_module.__dict__.update({ - '__file__': __file__, - '__path__': __path__, # type: ignore - '__doc__': __doc__, - '__all__': tuple(object_origins), - '__version__': __version__, - '__author__': __author__, - '__contact__': __contact__, - '__homepage__': __homepage__, - '__docformat__': __docformat__, - '__package__': __package__, - 'version_info_t': 
version_info_t, - 'version_info': version_info, - 'VERSION': VERSION, -}) +new_module.__dict__.update( + { + "__file__": __file__, + "__path__": __path__, # type: ignore + "__doc__": __doc__, + "__all__": tuple(object_origins), + "__version__": __version__, + "__author__": __author__, + "__contact__": __contact__, + "__homepage__": __homepage__, + "__docformat__": __docformat__, + "__package__": __package__, + "version_info_t": version_info_t, + "version_info": version_info, + "VERSION": VERSION, + } +) diff --git a/mode/debug.py b/mode/debug.py index 6ac5c688..3f2a80af 100644 --- a/mode/debug.py +++ b/mode/debug.py @@ -9,20 +9,26 @@ from .utils.logging import get_logger from .utils.times import Seconds, want_seconds -__all__ = ['Blocking', 'BlockingDetector'] +__all__ = ["Blocking", "BlockingDetector"] logger = get_logger(__name__) -if hasattr(signal, 'setitimer'): # pragma: no cover +if hasattr(signal, "setitimer"): # pragma: no cover + def arm_alarm(seconds: float) -> None: signal.setitimer(signal.ITIMER_REAL, seconds) + + else: # pragma: no cover try: import itimer except ImportError: + def arm_alarm(seconds: float) -> None: signal.alarm(math.ceil(seconds)) + else: + def arm_alarm(seconds: float) -> None: itimer(seconds) @@ -47,10 +53,9 @@ class BlockingDetector(Service): logger = logger - def __init__(self, - timeout: Seconds, - raises: Type[BaseException] = Blocking, - **kwargs: Any) -> None: + def __init__( + self, timeout: Seconds, raises: Type[BaseException] = Blocking, **kwargs: Any + ) -> None: self.timeout: float = want_seconds(timeout) self.raises: Type[BaseException] = raises super().__init__(**kwargs) @@ -75,9 +80,8 @@ def _arm(self, timeout: float) -> None: arm_alarm(timeout) def _on_alarm(self, signum: int, frame: FrameType) -> None: - msg = f'Blocking detected (timeout={self.timeout})' - stack = ''.join(traceback.format_stack(frame)) - self.log.warning('Blocking detected (timeout=%r) %s', - self.timeout, stack) + msg = f"Blocking detected 
(timeout={self.timeout})" + stack = "".join(traceback.format_stack(frame)) + self.log.warning("Blocking detected (timeout=%r) %s", self.timeout, stack) self._reset_signal() raise self.raises(msg) diff --git a/mode/exceptions.py b/mode/exceptions.py index bea6583c..c2d06b7e 100644 --- a/mode/exceptions.py +++ b/mode/exceptions.py @@ -1,6 +1,6 @@ """Custom exceptions.""" -__all__ = ['MaxRestartsExceeded'] +__all__ = ["MaxRestartsExceeded"] class MaxRestartsExceeded(Exception): diff --git a/mode/locals.py b/mode/locals.py index 08d0a7cb..5869089c 100644 --- a/mode/locals.py +++ b/mode/locals.py @@ -74,7 +74,6 @@ class XProxy(MutableMappingRole, AsyncContextManagerRole): """ import sys import typing - from collections import deque from functools import wraps from types import GetSetDescriptorType, TracebackType @@ -109,6 +108,7 @@ class XProxy(MutableMappingRole, AsyncContextManagerRole): no_type_check, overload, ) + from .utils.locals import LocalStack # XXX compat if typing.TYPE_CHECKING: # pragma: no cover @@ -117,57 +117,56 @@ class XProxy(MutableMappingRole, AsyncContextManagerRole): from .utils.typing import AsyncContextManager, AsyncGenerator # noqa __all__ = [ - 'LocalStack', - 'Proxy', - 'AwaitableRole', - 'AwaitableProxy', - 'CoroutineRole', - 'CoroutineProxy', - 'AsyncIterableRole', - 'AsyncIterableProxy', - 'AsyncIteratorRole', - 'AsyncIteratorProxy', - 'AsyncGeneratorRole', - 'AsyncGeneratorProxy', - 'SequenceRole', - 'SequenceProxy', - 'MutableSequenceRole', - 'MutableSequenceProxy', - 'SetRole', - 'SetProxy', - 'MutableSetRole', - 'MutableSetProxy', - 'ContextManagerRole', - 'ContextManagerProxy', - 'AsyncContextManagerRole', - 'AsyncContextManagerProxy', - 'MappingRole', - 'MappingProxy', - 'MutableMappingRole', - 'MutableMappingProxy', - 'CallableRole', - 'CallableProxy', - 'maybe_evaluate', + "LocalStack", + "Proxy", + "AwaitableRole", + "AwaitableProxy", + "CoroutineRole", + "CoroutineProxy", + "AsyncIterableRole", + "AsyncIterableProxy", + 
"AsyncIteratorRole", + "AsyncIteratorProxy", + "AsyncGeneratorRole", + "AsyncGeneratorProxy", + "SequenceRole", + "SequenceProxy", + "MutableSequenceRole", + "MutableSequenceProxy", + "SetRole", + "SetProxy", + "MutableSetRole", + "MutableSetProxy", + "ContextManagerRole", + "ContextManagerProxy", + "AsyncContextManagerRole", + "AsyncContextManagerProxy", + "MappingRole", + "MappingProxy", + "MutableMappingRole", + "MutableMappingProxy", + "CallableRole", + "CallableProxy", + "maybe_evaluate", ] -PYPY = hasattr(sys, 'pypy_version_info') +PYPY = hasattr(sys, "pypy_version_info") SLOTS_ISSUE_PRESENT = sys.version_info < (3, 7) -T = TypeVar('T') -S = TypeVar('S') -T_co = TypeVar('T_co', covariant=True) -V_co = TypeVar('V_co', covariant=True) -VT_co = TypeVar('VT_co', covariant=True) -T_contra = TypeVar('T_contra', contravariant=True) +T = TypeVar("T") +S = TypeVar("S") +T_co = TypeVar("T_co", covariant=True) +V_co = TypeVar("V_co", covariant=True) +VT_co = TypeVar("VT_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) -KT = TypeVar('KT') -VT = TypeVar('VT') +KT = TypeVar("KT") +VT = TypeVar("VT") def _default_cls_attr( - name: str, - type_: Type, - cls_value: Any) -> Callable[[Type], GetSetDescriptorType]: + name: str, type_: Type, cls_value: Any +) -> Callable[[Type], GetSetDescriptorType]: # Proxy uses properties to forward the standard # class attributes __module__, __name__ and __doc__ to the real # object, but these needs to be a string when accessed from @@ -182,10 +181,14 @@ def __new__(cls: Type, getter: Callable) -> Any: def __get__(self: Type, obj: Any, cls: Type = None) -> Any: return self.__getter(obj) if obj is not None else self - return type(name, (type_,), { - '__new__': __new__, - '__get__': __get__, - }) + return type( + name, + (type_,), + { + "__new__": __new__, + "__get__": __get__, + }, + ) class Proxy(Generic[T]): @@ -196,11 +199,11 @@ class Proxy(Generic[T]): # Code initially stolen from werkzeug.local.Proxy. 
if not SLOTS_ISSUE_PRESENT and not PYPY: # pragma: no cover __slots__ = ( - '__local', - '__args', - '__kwargs', - '__finalizers', - '__dict__', + "__local", + "__args", + "__kwargs", + "__finalizers", + "__dict__", ) def __init_subclass__(self, source: Type[T] = None) -> None: @@ -215,53 +218,53 @@ def __init_subclass__(self, source: Type[T] = None) -> None: @classmethod def _init_from_source(cls, source: Type[T]) -> None: # source must have metaclass ABCMeta - abstractmethods = getattr(source, '__abstractmethods__', None) + abstractmethods = getattr(source, "__abstractmethods__", None) if abstractmethods is None: - raise TypeError('class is not using metaclass ABCMeta') + raise TypeError("class is not using metaclass ABCMeta") for method_name in abstractmethods: - setattr(cls, method_name, - cls._generate_proxy_method(source, method_name)) + setattr(cls, method_name, cls._generate_proxy_method(source, method_name)) @classmethod - def _generate_proxy_method( - cls, source: Type[T], method_name: str) -> Callable: - + def _generate_proxy_method(cls, source: Type[T], method_name: str) -> Callable: @wraps(getattr(source, method_name)) def _classmethod(self: Proxy[T], *args: Any, **kwargs: Any) -> Any: obj = self._get_current_object() return getattr(obj, method_name)(*args, **kwargs) + _classmethod.__isabstractmethod__ = False # type: ignore return _classmethod - def __init__(self, - local: Callable[..., T], - args: Tuple = None, - kwargs: Dict = None, - name: str = None, - cache: bool = False, - __doc__: str = None) -> None: - object.__setattr__(self, '_Proxy__local', local) - object.__setattr__(self, '_Proxy__args', args or ()) - object.__setattr__(self, '_Proxy__kwargs', kwargs or {}) - object.__setattr__(self, '_Proxy__cached', cache) - object.__setattr__(self, '_Proxy__finalizers', deque()) + def __init__( + self, + local: Callable[..., T], + args: Tuple = None, + kwargs: Dict = None, + name: str = None, + cache: bool = False, + __doc__: str = None, + ) -> None: + 
object.__setattr__(self, "_Proxy__local", local) + object.__setattr__(self, "_Proxy__args", args or ()) + object.__setattr__(self, "_Proxy__kwargs", kwargs or {}) + object.__setattr__(self, "_Proxy__cached", cache) + object.__setattr__(self, "_Proxy__finalizers", deque()) if name is not None: - object.__setattr__(self, '__custom_name__', name) + object.__setattr__(self, "__custom_name__", name) if __doc__ is not None: - object.__setattr__(self, '__doc__', __doc__) + object.__setattr__(self, "__doc__", __doc__) - def _add_proxy_finalizer(self, fun: 'Proxy') -> None: - finalizers = object.__getattribute__(self, '_Proxy__finalizers') + def _add_proxy_finalizer(self, fun: "Proxy") -> None: + finalizers = object.__getattribute__(self, "_Proxy__finalizers") finalizers.append(fun) def _call_proxy_finalizers(self) -> None: - finalizers = object.__getattribute__(self, '_Proxy__finalizers') + finalizers = object.__getattribute__(self, "_Proxy__finalizers") while finalizers: finalizer = finalizers.popleft() finalizer._get_current_object() # evaluate - @_default_cls_attr('name', str, __name__) + @_default_cls_attr("name", str, __name__) @no_type_check def __name__(self) -> str: try: @@ -269,12 +272,12 @@ def __name__(self) -> str: except AttributeError: return self._get_current_object().__name__ - @_default_cls_attr('module', str, __name__) + @_default_cls_attr("module", str, __name__) @no_type_check def __module__(self) -> str: return self._get_current_object().__module__ - @_default_cls_attr('doc', str, __doc__) + @_default_cls_attr("doc", str, __doc__) @no_type_check def __doc__(self) -> Optional[str]: return cast(str, self._get_current_object().__doc__) @@ -298,18 +301,18 @@ def _get_current_object(self) -> T: you want to pass the object into a different context. 
""" try: - return cast(T, object.__getattribute__(self, '__cache')) + return cast(T, object.__getattribute__(self, "__cache")) except AttributeError: return self.__evaluate__() - def __evaluate__(self, - _clean: Tuple[str, ...] = ('_Proxy__local', - '_Proxy__args', - '_Proxy__kwargs')) -> T: + def __evaluate__( + self, + _clean: Tuple[str, ...] = ("_Proxy__local", "_Proxy__args", "_Proxy__kwargs"), + ) -> T: thing = self._evaluate_proxy() - cached = object.__getattribute__(self, '_Proxy__cached') + cached = object.__getattribute__(self, "_Proxy__cached") if cached: - object.__setattr__(self, '__cache', thing) + object.__setattr__(self, "__cache", thing) for attr in _clean: try: object.__delattr__(self, attr) @@ -320,18 +323,18 @@ def __evaluate__(self, def _evaluate_proxy(self) -> T: self._call_proxy_finalizers() - loc = object.__getattribute__(self, '_Proxy__local') - if not hasattr(loc, '__release_local__'): + loc = object.__getattribute__(self, "_Proxy__local") + if not hasattr(loc, "__release_local__"): return cast(T, loc(*self.__args, **self.__kwargs)) try: # pragma: no cover # not sure what this is about return cast(T, getattr(loc, self.__name__)) except AttributeError: # pragma: no cover - raise RuntimeError('no object bound to {0.__name__}'.format(self)) + raise RuntimeError("no object bound to {0.__name__}".format(self)) def __evaluated__(self) -> bool: try: - object.__getattribute__(self, '__cache') + object.__getattribute__(self, "__cache") except AttributeError: return False return True @@ -344,13 +347,13 @@ def __dict__(self) -> Dict[str, Any]: # type: ignore try: return self._get_current_object().__dict__ except RuntimeError: # pragma: no cover - raise AttributeError('__dict__') + raise AttributeError("__dict__") def __repr__(self) -> str: try: obj = self._get_current_object() except RuntimeError: # pragma: no cover - return '<{0} unbound>'.format(self.__class__.__name__) + return "<{0} unbound>".format(self.__class__.__name__) return repr(obj) def 
__bool__(self) -> bool: @@ -358,6 +361,7 @@ def __bool__(self) -> bool: return bool(self._get_current_object()) except RuntimeError: # pragma: no cover return False + __nonzero__ = __bool__ # Py2 def __dir__(self) -> List[str]: @@ -367,7 +371,7 @@ def __dir__(self) -> List[str]: return [] def __getattr__(self, name: str) -> Any: - if name == '__members__': + if name == "__members__": return dir(self._get_current_object()) return getattr(self._get_current_object(), name) @@ -421,18 +425,21 @@ def __await__(self) -> Generator[Any, None, V_co]: def send(self, value: T_contra) -> T_co: return self._get_coroutine().send(value) - def throw(self, - typ: Type[BaseException], - val: Optional[BaseException] = None, - tb: TracebackType = None) -> T_co: + def throw( + self, + typ: Type[BaseException], + val: Optional[BaseException] = None, + tb: TracebackType = None, + ) -> T_co: return self._get_coroutine().throw(typ, val, tb) def close(self) -> None: return self._get_coroutine().close() -class CoroutineProxy(Proxy[Coroutine[T_co, T_contra, V_co]], - CoroutineRole[T_co, T_contra, V_co]): +class CoroutineProxy( + Proxy[Coroutine[T_co, T_contra, V_co]], CoroutineRole[T_co, T_contra, V_co] +): """Proxy to :class:`typing.Coroutine` object.""" @@ -447,8 +454,7 @@ def __aiter__(self) -> AsyncIterator[T_co]: return self._get_iterable().__aiter__() -class AsyncIterableProxy(Proxy[AsyncIterable[T_co]], - AsyncIterableRole[T_co]): +class AsyncIterableProxy(Proxy[AsyncIterable[T_co]], AsyncIterableRole[T_co]): """Proxy to :class:`typing.AsyncIterable` object.""" @@ -466,8 +472,7 @@ def __anext__(self) -> Awaitable[T_co]: return self._get_iterator().__anext__() -class AsyncIteratorProxy(Proxy[AsyncIterator[T_co]], - AsyncIteratorRole[T_co]): +class AsyncIteratorProxy(Proxy[AsyncIterator[T_co]], AsyncIteratorRole[T_co]): """Proxy to :class:`typing.AsyncIterator` object.""" @@ -484,10 +489,12 @@ def __anext__(self) -> Awaitable[T_co]: def asend(self, value: T_contra) -> Awaitable[T_co]: 
return self._get_generator().asend(value) - def athrow(self, - typ: Type[BaseException], - val: Optional[BaseException] = None, - tb: TracebackType = None) -> Awaitable[T_co]: + def athrow( + self, + typ: Type[BaseException], + val: Optional[BaseException] = None, + tb: TracebackType = None, + ) -> Awaitable[T_co]: return self._get_generator().athrow(typ, val, tb) def aclose(self) -> Awaitable[None]: @@ -497,8 +504,9 @@ def __aiter__(self) -> AsyncGenerator[T_co, T_contra]: return self._get_generator().__aiter__() -class AsyncGeneratorProxy(Proxy[AsyncGenerator[T_co, T_contra]], - AsyncGeneratorRole[T_co, T_contra]): +class AsyncGeneratorProxy( + Proxy[AsyncGenerator[T_co, T_contra]], AsyncGeneratorRole[T_co, T_contra] +): """Proxy to :class:`typing.AsyncGenerator` object.""" @@ -539,8 +547,7 @@ def __len__(self) -> int: return self._get_sequence().__len__() -class SequenceProxy(Proxy[Sequence[T_co]], - SequenceRole[T_co]): +class SequenceProxy(Proxy[Sequence[T_co]], SequenceRole[T_co]): """Proxy to :class:`typing.Sequence` object.""" @@ -595,8 +602,7 @@ def __iadd__(self, x: Iterable[T]) -> MutableSequence[T]: return self._get_sequence().__iadd__(x) -class MutableSequenceProxy(Proxy[MutableSequence[T_co]], - MutableSequenceRole[T_co]): +class MutableSequenceProxy(Proxy[MutableSequence[T_co]], MutableSequenceRole[T_co]): """Proxy to :class:`typing.MutableSequence` object.""" @@ -644,8 +650,7 @@ def __len__(self) -> int: return self._get_set().__len__() -class SetProxy(Proxy[AbstractSet[T_co]], - SetRole[T_co]): +class SetProxy(Proxy[AbstractSet[T_co]], SetRole[T_co]): """Proxy to :class:`typing.AbstractSet` object.""" @@ -684,8 +689,7 @@ def __isub__(self, s: AbstractSet[Any]) -> MutableSet[T]: return self._get_set().__isub__(s) -class MutableSetProxy(Proxy[MutableSet[T_co]], - MutableSetRole[T_co]): +class MutableSetProxy(Proxy[MutableSet[T_co]], MutableSetRole[T_co]): """Proxy to :class:`typing.MutableSet` object.""" @@ -703,8 +707,7 @@ def __exit__(self, 
*exc_info: Any) -> Any: return self._get_context().__exit__(*exc_info) -class ContextManagerProxy(Proxy[ContextManager[T]], - ContextManagerRole[T]): +class ContextManagerProxy(Proxy[ContextManager[T]], ContextManagerRole[T]): """Proxy to :class:`typing.ContextManager` object.""" @@ -716,17 +719,19 @@ def __aenter__(self) -> Awaitable[T_co]: return cast(Awaitable[T_co], obj.__aenter__()) def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]]: + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> Awaitable[Optional[bool]]: obj = self._get_current_object() # type: ignore val = obj.__aexit__(exc_type, exc_value, traceback) return cast(Awaitable[Optional[bool]], val) -class AsyncContextManagerProxy(Proxy[AsyncContextManager[T_co]], - AsyncContextManagerRole[T_co]): +class AsyncContextManagerProxy( + Proxy[AsyncContextManager[T_co]], AsyncContextManagerRole[T_co] +): """Proxy to :class:`typing.AsyncContextManager` object.""" @@ -745,8 +750,7 @@ def get(self, k: KT) -> Optional[VT_co]: ... @overload # noqa: F811 - def get(self, k: KT, # noqa: F811 - default: Union[VT_co, T]) -> Union[VT_co, T]: + def get(self, k: KT, default: Union[VT_co, T]) -> Union[VT_co, T]: # noqa: F811 ... def get(self, *args: Any, **kwargs: Any) -> Any: # noqa: F811 @@ -771,8 +775,7 @@ def __len__(self) -> int: return self._get_mapping().__len__() -class MappingProxy(Proxy[Mapping[KT, VT_co]], - MappingRole[KT, VT_co]): +class MappingProxy(Proxy[Mapping[KT, VT_co]], MappingRole[KT, VT_co]): """Proxy to :class:`typing.Mapping` object.""" @@ -797,8 +800,7 @@ def pop(self, k: KT) -> VT: ... @overload # noqa: F811 - def pop(self, k: KT, # noqa: F811 - default: Union[VT, T] = ...) -> Union[VT, T]: + def pop(self, k: KT, default: Union[VT, T] = ...) -> Union[VT, T]: # noqa: F811 ... 
def pop(self, *args: Any, **kwargs: Any) -> Any: # noqa: F811 @@ -815,8 +817,7 @@ def update(self, __m: Mapping[KT, VT], **kwargs: VT) -> None: ... @overload # noqa: F811 - def update(self, __m: Iterable[Tuple[KT, VT]], # noqa: F811 - **kwargs: VT) -> None: + def update(self, __m: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: # noqa: F811 ... @overload # noqa: F811 @@ -827,8 +828,7 @@ def update(self, *args: Any, **kwargs: Any) -> None: # noqa: F811 self._get_mapping().update(*args, **kwargs) -class MutableMappingProxy(Proxy[MutableMapping[KT, VT]], - MutableMappingRole[KT, VT]): +class MutableMappingProxy(Proxy[MutableMapping[KT, VT]], MutableMappingRole[KT, VT]): """Proxy to :class:`typing.MutableMapping` object.""" diff --git a/mode/loop/__init__.py b/mode/loop/__init__.py index 6e1f4432..fe6ae4fe 100644 --- a/mode/loop/__init__.py +++ b/mode/loop/__init__.py @@ -51,13 +51,13 @@ import importlib from typing import Mapping, Optional -__all__ = ['LOOPS', 'use'] +__all__ = ["LOOPS", "use"] LOOPS: Mapping[str, Optional[str]] = { - 'aio': None, - 'eventlet': 'mode.loop.eventlet', - 'gevent': 'mode.loop.gevent', - 'uvloop': 'mode.loop.uvloop', + "aio": None, + "eventlet": "mode.loop.eventlet", + "gevent": "mode.loop.gevent", + "uvloop": "mode.loop.uvloop", } diff --git a/mode/loop/_gevent_loop.py b/mode/loop/_gevent_loop.py index 291879a6..912177e7 100644 --- a/mode/loop/_gevent_loop.py +++ b/mode/loop/_gevent_loop.py @@ -1,6 +1,7 @@ """Gevent loop customizations.""" import asyncio from typing import Any + import gevent.core diff --git a/mode/loop/eventlet.py b/mode/loop/eventlet.py index a80414db..1f77a24a 100644 --- a/mode/loop/eventlet.py +++ b/mode/loop/eventlet.py @@ -1,12 +1,14 @@ """Enable :pypi:`eventlet` support for :mod:`asyncio`.""" +import asyncio # noqa: E402,I100,I202 import os -os.environ['GEVENT_LOOP'] = 'mode.loop._gevent_loop.Loop' + +os.environ["GEVENT_LOOP"] = "mode.loop._gevent_loop.Loop" try: import eventlet except ImportError: raise 
ImportError( - 'Eventlet loop requires the eventlet library: ' - 'pip install eventlet') from None + "Eventlet loop requires the eventlet library: " "pip install eventlet" + ) from None eventlet.monkey_patch() try: @@ -14,12 +16,12 @@ except ImportError: raise raise ImportError( - 'Eventlet loop requires the aioeventlet library: ' - 'pip install aioeventlet') from None + "Eventlet loop requires the aioeventlet library: " "pip install aioeventlet" + ) from None + -import asyncio # noqa: E402,I100,I202 if asyncio._get_running_loop() is not None: - raise RuntimeError('Event loop created before importing eventlet loop!') + raise RuntimeError("Event loop created before importing eventlet loop!") Policy = aioeventlet.EventLoopPolicy policy = Policy() diff --git a/mode/loop/gevent.py b/mode/loop/gevent.py index 8af1254a..14d28fa7 100644 --- a/mode/loop/gevent.py +++ b/mode/loop/gevent.py @@ -1,16 +1,18 @@ """Enable :pypi:`gevent` support for :mod:`asyncio`.""" +import asyncio # noqa: E402,I100,I202 import os import warnings -os.environ['GEVENT_LOOP'] = 'mode.loop._gevent_loop.Loop' +from typing import Optional, cast # noqa: F401,E402 + +os.environ["GEVENT_LOOP"] = "mode.loop._gevent_loop.Loop" try: import gevent import gevent.monkey except ImportError: raise ImportError( - 'Gevent loop requires the gevent library: ' - 'pip install gevent') from None + "Gevent loop requires the gevent library: " "pip install gevent" + ) from None gevent.monkey.patch_all() -from typing import Optional, cast # noqa: F401,E402 try: import psycopg2 # noqa: F401 @@ -20,8 +22,7 @@ try: import psycogreen.gevent except ImportError: - warnings.warn( - 'psycopg2 installed, but not psycogreen: pg will be blocking') + warnings.warn("psycopg2 installed, but not psycogreen: pg will be blocking") else: psycogreen.gevent.patch_psycopg() @@ -30,12 +31,12 @@ except ImportError: raise raise ImportError( - 'Gevent loop requires the aiogevent library: ' - 'pip install aiogevent') from None + "Gevent loop 
requires the aiogevent library: " "pip install aiogevent" + ) from None + -import asyncio # noqa: E402,I100,I202 if asyncio._get_running_loop() is not None: - raise RuntimeError('Event loop created before importing gevent loop!') + raise RuntimeError("Event loop created before importing gevent loop!") class Policy(aiogevent.EventLoopPolicy): # type: ignore diff --git a/mode/loop/uvloop.py b/mode/loop/uvloop.py index 84ff3532..145b780c 100644 --- a/mode/loop/uvloop.py +++ b/mode/loop/uvloop.py @@ -1,4 +1,6 @@ """Enable :pypi:`uvloop` as the event loop for :mod:`asyncio`.""" import asyncio + import uvloop + asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) diff --git a/mode/proxy.py b/mode/proxy.py index f13c4762..d92bdcce 100644 --- a/mode/proxy.py +++ b/mode/proxy.py @@ -4,12 +4,13 @@ """ import abc from typing import Any, ContextManager, Optional + from .services import ServiceBase from .types import ServiceT -from .utils.typing import AsyncContextManager from .utils.types.trees import NodeT +from .utils.typing import AsyncContextManager -__all__ = ['ServiceProxy'] +__all__ = ["ServiceProxy"] class ServiceProxy(ServiceBase): diff --git a/mode/services.py b/mode/services.py index 5a7b31b1..06131980 100644 --- a/mode/services.py +++ b/mode/services.py @@ -2,7 +2,7 @@ import asyncio import logging import sys - +from datetime import tzinfo from functools import wraps from time import monotonic, perf_counter from types import TracebackType @@ -31,6 +31,7 @@ from .timers import Timer from .types import DiagT, ServiceT from .utils.contexts import AsyncExitStack, ExitStack +from .utils.cron import secs_for_next from .utils.locks import Event from .utils.logging import CompositeLogger, get_logger, level_number from .utils.objects import iter_mro_reversed, qualname @@ -38,15 +39,15 @@ from .utils.times import Seconds, want_seconds from .utils.tracebacks import format_task_stack from .utils.trees import Node -from .utils.typing import AsyncContextManager from 
.utils.types.trees import NodeT +from .utils.typing import AsyncContextManager __all__ = [ - 'ServiceBase', - 'Service', - 'Diag', - 'task', - 'timer', + "ServiceBase", + "Service", + "Diag", + "task", + "timer", ] ClockArg = Callable[[], float] @@ -104,7 +105,7 @@ def __init_subclass__(self) -> None: def _init_subclass_logger(cls) -> None: # make sure class has a logger. logger = cast(Optional[logging.Logger], cls.logger) - if logger is None or getattr(logger, '__modex__', False): + if logger is None or getattr(logger, "__modex__", False): _logger = cls.logger = get_logger(cls.__module__) _logger.__modex__ = True # type: ignore @@ -112,18 +113,19 @@ def __init__(self, *, loop: asyncio.AbstractEventLoop = None) -> None: self.log = CompositeLogger(self.logger, formatter=self._format_log) self._loop = loop - def _format_log(self, severity: int, msg: str, - *args: Any, **kwargs: Any) -> str: + def _format_log(self, severity: int, msg: str, *args: Any, **kwargs: Any) -> str: return f'[^{"-" * (self.beacon.depth - 1)}{self.shortlabel}]: {msg}' async def __aenter__(self) -> ServiceT: await self.start() return self - async def __aexit__(self, - exc_type: Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: TracebackType = None) -> Optional[bool]: + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: TracebackType = None, + ) -> Optional[bool]: try: await self.stop() finally: @@ -132,11 +134,11 @@ async def __aexit__(self, def __repr__(self) -> str: # Override _repr_info to add additional text to repr. 
- info = maybecat(self._repr_info(), prefix=' ') or '' - return f'<{self._repr_name()}: {self.state}{info}>' + info = maybecat(self._repr_info(), prefix=" ") or "" + return f"<{self._repr_name()}: {self.state}{info}>" def _repr_info(self) -> str: - return '' + return "" def _repr_name(self) -> str: return type(self).__name__ @@ -365,7 +367,7 @@ class Service(ServiceBase, ServiceCallbacks): #: The log level for mundane info such as `starting`, `stopping`, etc. #: Set this to ``"debug"`` for less information. - mundane_level = 'info' + mundane_level = "info" _mundane_level: int #: Event set when service started. @@ -403,9 +405,9 @@ class Service(ServiceBase, ServiceCallbacks): _tasks: ClassVar[Optional[Dict[str, Set[str]]]] = None @classmethod - def from_awaitable(cls, coro: Awaitable, *, - name: str = None, - **kwargs: Any) -> ServiceT: + def from_awaitable( + cls, coro: Awaitable, *, name: str = None, **kwargs: Any + ) -> ServiceT: return _AwaitableService(coro, name=name) @classmethod @@ -436,31 +438,63 @@ def timer(cls, interval: Seconds) -> Callable[[Callable], ServiceTask]: """ _interval = want_seconds(interval) - def _decorate( - fun: Callable[[ServiceT], Awaitable[None]]) -> ServiceTask: + def _decorate(fun: Callable[[ServiceT], Awaitable[None]]) -> ServiceTask: @wraps(fun) async def _repeater(self: Service) -> None: await self.sleep(_interval) - async for sleep_time in self.itertimer( - _interval, name=qualname(fun)): + async for sleep_time in self.itertimer(_interval, name=qualname(fun)): await fun(self) + return cls.task(_repeater) + + return _decorate + + @classmethod + def crontab( + cls, cron_format: str, *, timezone: tzinfo = None + ) -> Callable[[Callable], ServiceTask]: + """Background timer executing periodic task based on Crontab description. + + Example: + >>> class S(Service): + ... + ... @Service.crontab(cron_format='30 18 * * *', + timezone=pytz.timezone('US/Pacific')) + ... async def every_6_30_pm_pacific(self): + ... 
print('IT IS 6:30pm') + ... + ... @Service.crontab(cron_format='30 18 * * *') + ... async def every_6_30_pm(self): + ... print('6:30pm UTC') + """ + + def _decorate(fun: Callable[[ServiceT], Awaitable[None]]) -> ServiceTask: + @wraps(fun) + async def _cron_starter(self: Service) -> None: + while not self.should_stop: + await self.sleep(secs_for_next(cron_format, timezone)) + if not self.should_stop: + await fun(self) + + return cls.task(_cron_starter) + return _decorate @classmethod def transitions_to(cls, flag: str) -> Callable: """Decorate function to set and reset diagnostic flag.""" - def _decorate( - fun: Callable[..., Awaitable]) -> Callable[..., Awaitable]: + + def _decorate(fun: Callable[..., Awaitable]) -> Callable[..., Awaitable]: @wraps(fun) - async def _and_transition(self: ServiceT, - *args: Any, **kwargs: Any) -> Any: + async def _and_transition(self: ServiceT, *args: Any, **kwargs: Any) -> Any: self.diag.set_flag(flag) try: return await fun(self, *args, **kwargs) finally: self.diag.unset_flag(flag) + return _and_transition + return _decorate def __init_subclass__(self) -> None: @@ -485,7 +519,8 @@ def _init_subclass_tasks(cls) -> None: tasks: Set[str] = set() for base in iter_mro_reversed(cls, stop=Service): tasks |= { - attr_name for attr_name, attr_value in vars(base).items() + attr_name + for attr_name, attr_value in vars(base).items() if isinstance(attr_value, ServiceTask) } cls._tasks[clsid] = tasks @@ -503,11 +538,11 @@ def _get_tasks(self) -> Iterable[ServiceTask]: @classmethod def _get_class_id(cls) -> str: - return '.'.join([cls.__module__, cls.__qualname__]) + return ".".join([cls.__module__, cls.__qualname__]) - def __init__(self, *, - beacon: NodeT = None, - loop: asyncio.AbstractEventLoop = None) -> None: + def __init__( + self, *, beacon: NodeT = None, loop: asyncio.AbstractEventLoop = None + ) -> None: self.diag = self.Diag(self) self._loop = loop self._started = self._new_started_event() @@ -537,8 +572,9 @@ def 
_new_shutdown_event(self) -> Event: def _new_crashed_event(self) -> Event: return Event(loop=self._loop) - async def transition_with(self, flag: str, fut: Awaitable, - *args: Any, **kwargs: Any) -> Any: + async def transition_with( + self, flag: str, fut: Awaitable, *args: Any, **kwargs: Any + ) -> Any: self.diag.set_flag(flag) try: return await fut @@ -573,17 +609,15 @@ async def add_async_context(self, context: AsyncContextManager) -> Any: if isinstance(context, AsyncContextManager): return await self.async_exit_stack.enter_async_context(context) elif isinstance(context, ContextManager): # type: ignore - raise TypeError( - 'Use `self.add_context(ctx)` for non-async context') - raise TypeError(f'Not a context/async context: {type(context)!r}') + raise TypeError("Use `self.add_context(ctx)` for non-async context") + raise TypeError(f"Not a context/async context: {type(context)!r}") def add_context(self, context: ContextManager) -> Any: if isinstance(context, AsyncContextManager): - raise TypeError( - 'Use `await self.add_async_context(ctx)` for async context') + raise TypeError("Use `await self.add_async_context(ctx)` for async context") elif isinstance(context, ContextManager): return self.exit_stack.enter_context(context) - raise TypeError(f'Not a context/async context: {type(context)!r}') + raise TypeError(f"Not a context/async context: {type(context)!r}") def add_future(self, coro: Awaitable) -> asyncio.Future: """Add relationship to asyncio.Future. 
@@ -614,13 +648,18 @@ def tracebacks(self) -> Mapping[str, str]: } def human_tracebacks(self) -> str: - return '\n'.join([ - '\n'.join([ - name, - '=' * len(name), - tb, - ]) for name, tb in self.tracebacks().items() - ]) + return "\n".join( + [ + "\n".join( + [ + name, + "=" * len(name), + tb, + ] + ) + for name, tb in self.tracebacks().items() + ] + ) def _on_future_done(self, fut: asyncio.Future) -> None: self._futures.discard(fut) @@ -645,24 +684,22 @@ async def join_services(self, services: Sequence[ServiceT]) -> None: for service in reversed(services): await service.stop() - async def sleep(self, n: Seconds, *, - loop: asyncio.AbstractEventLoop = None) -> None: + async def sleep( + self, n: Seconds, *, loop: asyncio.AbstractEventLoop = None + ) -> None: """Sleep for ``n`` seconds, or until service stopped.""" try: await asyncio.wait_for( self._stopped.wait(), timeout=want_seconds(n), - loop=loop or self.loop, ) except asyncio.TimeoutError: pass - async def wait_for_stopped(self, *coros: WaitArgT, - timeout: Seconds = None) -> bool: + async def wait_for_stopped(self, *coros: WaitArgT, timeout: Seconds = None) -> bool: return (await self.wait(*coros, timeout=timeout)).stopped - async def wait(self, *coros: WaitArgT, - timeout: Seconds = None) -> WaitResult: + async def wait(self, *coros: WaitArgT, timeout: Seconds = None) -> WaitResult: """Wait for coroutines to complete, or until the service stops.""" if coros: assert len(coros) == 1 @@ -671,19 +708,19 @@ async def wait(self, *coros: WaitArgT, await self._wait_stopped(timeout=timeout) return WaitResult(None, True) - async def wait_many(self, coros: Iterable[WaitArgT], - *, - timeout: Seconds = None) -> WaitResult: + async def wait_many( + self, coros: Iterable[WaitArgT], *, timeout: Seconds = None + ) -> WaitResult: coro = asyncio.wait( cast(Iterable[Awaitable[Any]], coros), return_when=asyncio.ALL_COMPLETED, timeout=want_seconds(timeout), - loop=self.loop, ) return await self._wait_one(coro, timeout=timeout) 
- async def wait_first(self, *coros: WaitArgT, - timeout: Seconds = None) -> WaitResults: + async def wait_first( + self, *coros: WaitArgT, timeout: Seconds = None + ) -> WaitResults: _coros: Mapping[WaitArgT, FutureT] timeout = want_seconds(timeout) if timeout is not None else None stopped = self._stopped @@ -708,7 +745,6 @@ async def wait_first(self, *coros: WaitArgT, futures.values(), return_when=asyncio.FIRST_COMPLETED, timeout=timeout, - loop=self.loop, ) for f in done: if f.done() and f.exception() is not None: @@ -731,9 +767,7 @@ async def wait_first(self, *coros: WaitArgT, if not fut.done(): fut.cancel() - async def _wait_one(self, coro: WaitArgT, - *, - timeout: Seconds = None) -> WaitResult: + async def _wait_one(self, coro: WaitArgT, *, timeout: Seconds = None) -> WaitResult: results = await self.wait_first(coro, timeout=timeout) if results.stopped: return WaitResult(None, True) @@ -747,7 +781,6 @@ async def _wait_stopped(self, timeout: Seconds = None) -> None: [stopped, crashed], return_when=asyncio.FIRST_COMPLETED, timeout=timeout, - loop=self.loop, ) for fut in done: fut.result() # propagate exceptions @@ -779,7 +812,7 @@ async def _actually_start(self) -> None: if self.should_stop: break try: - self._log_mundane('Starting...') + self._log_mundane("Starting...") await self.on_start() if self.should_stop: break @@ -790,7 +823,7 @@ async def _actually_start(self) -> None: await child.maybe_start() if self.should_stop: break - self.log.debug('Started.') + self.log.debug("Started.") await self.on_started() except BaseException: self.exit_stack.__exit__(*sys.exc_info()) @@ -802,10 +835,10 @@ async def _execute_task(self, task: Awaitable) -> None: await task except asyncio.CancelledError: if not self.should_stop: - self._log_mundane('Terminating cancelled task: %r', task) + self._log_mundane("Terminating cancelled task: %r", task) except RuntimeError as exc: - if 'Event loop is closed' in str(exc): - self.log.info('Cancelled task %r: %s', task, exc) + if 
"Event loop is closed" in str(exc): + self.log.info("Cancelled task %r: %s", task, exc) else: await self.crash(exc) except BaseException as exc: @@ -824,7 +857,7 @@ def _log_mundane(self, msg: str, *args: Any, **kwargs: Any) -> None: async def crash(self, reason: BaseException) -> None: """Crash the service and all child services.""" - self.log.exception('Crashed reason=%r', reason) + self.log.exception("Crashed reason=%r", reason) if not self._crashed.is_set(): # We record the stack by raising the exception. @@ -837,8 +870,7 @@ async def crash(self, reason: BaseException) -> None: seen: Set[NodeT] = set() for node in self.beacon.walk(): if node in seen: - self.log.warning( - 'Recursive loop in beacon: %r: %r', node, seen) + self.log.warning("Recursive loop in beacon: %r: %r", node, seen) if root is not None and root.data is not self: cast(Service, root.data)._crash(reason) break @@ -857,21 +889,22 @@ def _crash(self, reason: BaseException) -> None: async def stop(self) -> None: """Stop the service.""" if not self._stopped.is_set(): - self._log_mundane('Stopping...') + self._log_mundane("Stopping...") self._stopped_set() await self.on_stop() await self._stop_children() - self.log.debug('Shutting down...') + self.log.debug("Shutting down...") if self.wait_for_shutdown: - self.log.debug('Waiting for shutdown') + self.log.debug("Waiting for shutdown") await asyncio.wait_for( - self._shutdown.wait(), self.shutdown_timeout, + self._shutdown.wait(), + self.shutdown_timeout, ) - self.log.debug('Shutting down now') + self.log.debug("Shutting down now") await self._stop_futures() await self._stop_exit_stacks() await self.on_shutdown() - self.log.debug('-Stopped!') + self.log.debug("-Stopped!") def _stopped_set(self) -> None: self._stopped.set() @@ -887,8 +920,7 @@ async def _default_stop_children(self) -> None: except asyncio.CancelledError: pass except Exception as exc: - self.log.exception( - 'Error while stopping child %r: %r', child, exc) + self.log.exception("Error 
while stopping child %r: %r", child, exc) async def _stop_futures(self) -> None: await self._default_stop_futures() @@ -935,7 +967,6 @@ async def _wait_for_futures(self, *, timeout: float = None) -> None: await asyncio.wait( self._futures, return_when=asyncio.ALL_COMPLETED, - loop=self.loop, timeout=timeout, ) @@ -948,10 +979,7 @@ async def restart(self) -> None: def service_reset(self) -> None: self.restart_count += 1 - for ev in (self._started, - self._stopped, - self._shutdown, - self._crashed): + for ev in (self._started, self._stopped, self._shutdown, self._crashed): ev.clear() self.crash_reason = None for child in self._children: @@ -971,13 +999,16 @@ def set_shutdown(self) -> None: """ self._shutdown.set() - async def itertimer(self, - interval: Seconds, *, - max_drift_correction: float = 0.1, - loop: asyncio.AbstractEventLoop = None, - sleep: Callable[..., Awaitable] = None, - clock: ClockArg = perf_counter, - name: str = '') -> AsyncIterator[float]: + async def itertimer( + self, + interval: Seconds, + *, + max_drift_correction: float = 0.1, + loop: asyncio.AbstractEventLoop = None, + sleep: Callable[..., Awaitable] = None, + clock: ClockArg = perf_counter, + name: str = "", + ) -> AsyncIterator[float]: """Sleep ``interval`` seconds for every iteration. 
This is an async iterator that takes advantage @@ -1001,11 +1032,12 @@ async def itertimer(self, return try: async for sleep_time in Timer( - interval, - name=name, - max_drift_correction=max_drift_correction, - clock=clock, - sleep=sleepfun): + interval, + name=name, + max_drift_correction=max_drift_correction, + clock=clock, + sleep=sleepfun, + ): if self.should_stop: break yield sleep_time @@ -1034,15 +1066,15 @@ def should_stop(self) -> bool: def state(self) -> str: """Service state - as a human readable string.""" if self._crashed.is_set(): - return 'crashed' + return "crashed" elif not self._started.is_set(): - return 'init' + return "init" elif not self._stopped.is_set(): - return 'running' + return "running" elif not self._shutdown.is_set(): - return 'stopping' + return "stopping" else: - return 'shutdown' + return "shutdown" @property def label(self) -> str: @@ -1077,13 +1109,11 @@ def crash_reason(self, reason: Optional[BaseException]) -> None: class _AwaitableService(Service): - mundane_level = 'debug' + mundane_level = "debug" _fut: Optional[asyncio.Future] - def __init__(self, coro: Awaitable, *, - name: str = None, - **kwargs: Any) -> None: + def __init__(self, coro: Awaitable, *, name: str = None, **kwargs: Any) -> None: self.coro = coro self._fut = None self.name = name diff --git a/mode/signals.py b/mode/signals.py index c7fa0738..2ba5a9d1 100644 --- a/mode/signals.py +++ b/mode/signals.py @@ -30,7 +30,7 @@ ) from .utils.futures import maybe_async -__all__ = ['BaseSignal', 'Signal', 'SyncSignal'] +__all__ = ["BaseSignal", "Signal", "SyncSignal"] class BaseSignal(BaseSignalT[T]): @@ -39,14 +39,17 @@ class BaseSignal(BaseSignalT[T]): _receivers: MutableSet[SignalHandlerRefT] _filter_receivers: FilterReceiverMapping - def __init__(self, *, - name: str = None, - owner: Type = None, - loop: asyncio.AbstractEventLoop = None, - default_sender: Any = None, - receivers: MutableSet[SignalHandlerRefT] = None, - filter_receivers: FilterReceiverMapping = None) 
-> None: - self.name = name or '' + def __init__( + self, + *, + name: str = None, + owner: Type = None, + loop: asyncio.AbstractEventLoop = None, + default_sender: Any = None, + receivers: MutableSet[SignalHandlerRefT] = None, + filter_receivers: FilterReceiverMapping = None, + ) -> None: + self.name = name or "" self.owner = owner self.loop = loop self.default_sender = default_sender @@ -57,10 +60,10 @@ def __init__(self, *, def asdict(self) -> Mapping[str, Any]: return { - 'name': self.name, - 'owner': self.owner, - 'loop': self.loop, - 'default_sender': self.default_sender, + "name": self.name, + "owner": self.owner, + "loop": self.loop, + "default_sender": self.default_sender, } def clone(self, **kwargs: Any) -> BaseSignalT: @@ -98,7 +101,7 @@ def unpack_sender_from_args(self, *args: Any) -> Tuple[T, Tuple[Any, ...]]: sender = self.default_sender if sender is None: if not args: - raise TypeError('Signal.send requires at least one argument') + raise TypeError("Signal.send requires at least one argument") if len(args) > 1: sender, *args = args # type: ignore else: @@ -110,10 +113,9 @@ def connect(self, fun: SignalHandlerT = None, **kwargs: Any) -> Callable: return self._connect(fun, **kwargs) return partial(self._connect, **kwargs) - def _connect(self, fun: SignalHandlerT, - *, - weak: bool = False, - sender: Any = None) -> SignalHandlerT: + def _connect( + self, fun: SignalHandlerT, *, weak: bool = False, sender: Any = None + ) -> SignalHandlerT: ref: SignalHandlerRefT ref = self._create_ref(fun) if weak else lambda: fun if self.default_sender is not None: @@ -124,10 +126,9 @@ def _connect(self, fun: SignalHandlerT, self._filter_receivers[self._create_id(sender)].add(ref) return fun - def disconnect(self, fun: SignalHandlerT, - *, - weak: bool = False, - sender: Any = None) -> None: + def disconnect( + self, fun: SignalHandlerT, *, weak: bool = False, sender: Any = None + ) -> None: ref: SignalHandlerRefT = self._create_ref(fun) if weak else lambda: fun if 
self.default_sender is not None: sender = self.default_sender @@ -144,21 +145,21 @@ def iter_receivers(self, sender: T_contra) -> Iterable[SignalHandlerT]: r = self._update_receivers(self._receivers) if sender is not None: sender_id = self._create_id(sender) - r.update(self._update_receivers( - self._filter_receivers[sender_id])) + r.update(self._update_receivers(self._filter_receivers[sender_id])) for receiver in r: yield receiver def _update_receivers( - self, r: MutableSet[SignalHandlerRefT]) -> Set[SignalHandlerT]: + self, r: MutableSet[SignalHandlerRefT] + ) -> Set[SignalHandlerT]: live_receivers, dead_refs = self._get_live_receivers(r) for href in dead_refs: r.discard(href) return live_receivers def _get_live_receivers( - self, r: MutableSet[SignalHandlerRefT]) -> Tuple[ - Set[SignalHandlerT], Set[SignalHandlerRefT]]: + self, r: MutableSet[SignalHandlerRefT] + ) -> Tuple[Set[SignalHandlerT], Set[SignalHandlerRefT]]: live_receivers: Set[SignalHandlerT] = set() dead_refs: Set[SignalHandlerRefT] = set() for href in r: @@ -170,15 +171,15 @@ def _get_live_receivers( return live_receivers, dead_refs def _is_alive( - self, - ref: SignalHandlerRefT) -> Tuple[bool, Optional[SignalHandlerT]]: + self, ref: SignalHandlerRefT + ) -> Tuple[bool, Optional[SignalHandlerT]]: if isinstance(ref, ReferenceType): value = ref() return value is not None, value return True, ref() def _create_ref(self, fun: SignalHandlerT) -> SignalHandlerRefT: - if hasattr(fun, '__func__') and hasattr(fun, '__self__'): + if hasattr(fun, "__func__") and hasattr(fun, "__self__"): return cast(SignalHandlerRefT, WeakMethod(cast(MethodType, fun))) else: return ref(fun) @@ -197,14 +198,14 @@ def ident(self) -> str: @property def label(self) -> str: if self.owner: - return f'{self.owner.__qualname__}.{self.name}' + return f"{self.owner.__qualname__}.{self.name}" return self.name def __repr__(self) -> str: - info = '' + info = "" if self.default_sender: - info = f' sender={self.default_sender!r}' - return 
f'<{type(self).__name__}: {self.label}{info}>' + info = f" sender={self.default_sender!r}" + return f"<{type(self).__name__}: {self.label}{info}>" class Signal(BaseSignal[T], SignalT[T]): diff --git a/mode/supervisors.py b/mode/supervisors.py index 29f15682..98ebd4ac 100644 --- a/mode/supervisors.py +++ b/mode/supervisors.py @@ -17,11 +17,11 @@ from .utils.times import Bucket, Seconds, rate_limit, want_seconds __all__ = [ - 'ForfeitOneForAllSupervisor', - 'ForfeitOneForOneSupervisor', - 'SupervisorStrategy', - 'OneForOneSupervisor', - 'OneForAllSupervisor', + "ForfeitOneForAllSupervisor", + "ForfeitOneForOneSupervisor", + "SupervisorStrategy", + "OneForOneSupervisor", + "OneForAllSupervisor", ] logger = get_logger(__name__) @@ -45,14 +45,15 @@ class SupervisorStrategy(Service, SupervisorStrategyT): # This is needed for Faust and the @app.agent(concurrency=n) feature. _index: Dict[ServiceT, int] - def __init__(self, - *services: ServiceT, - max_restarts: Seconds = 100.0, - over: Seconds = 1.0, - raises: Type[BaseException] = MaxRestartsExceeded, - replacement: Callable[[ServiceT, int], - Awaitable[ServiceT]] = None, - **kwargs: Any) -> None: + def __init__( + self, + *services: ServiceT, + max_restarts: Seconds = 100.0, + over: Seconds = 1.0, + raises: Type[BaseException] = MaxRestartsExceeded, + replacement: Callable[[ServiceT, int], Awaitable[ServiceT]] = None, + **kwargs: Any, + ) -> None: self.max_restarts = want_seconds(max_restarts) self.over = want_seconds(over) self.raises = raises @@ -150,8 +151,7 @@ async def on_stop(self) -> None: except MemoryError: raise except Exception as exc: - self.log.exception( - 'Unable to stop service %r: %r', service, exc) + self.log.exception("Unable to stop service %r: %r", service, exc) async def start_services(self, services: List[ServiceT]) -> None: for service in services: @@ -168,13 +168,15 @@ async def stop_services(self, services: List[ServiceT]) -> None: # Stop them all simultaneously. 
await asyncio.gather( *[service.stop() for service in services], - loop=self.loop, ) async def restart_service(self, service: ServiceT) -> None: - self.log.info('Restarting dead %r! Last crash reason: %r', - service, service.crash_reason, - exc_info=1) + self.log.info( + "Restarting dead %r! Last crash reason: %r", + service, + service.crash_reason, + exc_info=1, + ) try: async with self._bucket: if self.replacement: @@ -185,12 +187,12 @@ async def restart_service(self, service: ServiceT) -> None: else: await service.restart() except MaxRestartsExceeded as exc: - self.log.warning('Max restarts exceeded: %r', exc, exc_info=1) + self.log.warning("Max restarts exceeded: %r", exc, exc_info=1) raise SystemExit(1) @property def label(self) -> str: - return f'{type(self).__name__}: ({len(self._services)}@{id(self):#x})' + return f"{type(self).__name__}: ({len(self._services)}@{id(self):#x})" class OneForOneSupervisor(SupervisorStrategy): @@ -216,7 +218,7 @@ class ForfeitOneForOneSupervisor(SupervisorStrategy): async def restart_services(self, services: List[ServiceT]) -> None: if services: - self.log.critical('Giving up on crashed services: %r', services) + self.log.critical("Giving up on crashed services: %r", services) await self.stop_services(services) @@ -226,7 +228,7 @@ class ForfeitOneForAllSupervisor(SupervisorStrategy): async def restart_services(self, services: List[ServiceT]) -> None: if services: self.log.critical( - 'Giving up on all services in group because %r crashed', + "Giving up on all services in group because %r crashed", services, ) await self.stop_services(self._services) diff --git a/mode/threads.py b/mode/threads.py index a6dabc46..2d59e689 100644 --- a/mode/threads.py +++ b/mode/threads.py @@ -24,19 +24,14 @@ ) from .services import Service -from .utils.futures import ( - maybe_async, - maybe_set_exception, - maybe_set_result, - notify, -) +from .utils.futures import maybe_async, maybe_set_exception, maybe_set_result, notify from .utils.locks 
import Event __all__ = [ - 'QueuedMethod', - 'WorkerThread', - 'ServiceThread', - 'QueueServiceThread', + "QueuedMethod", + "WorkerThread", + "ServiceThread", + "QueueServiceThread", ] @@ -52,10 +47,10 @@ class QueuedMethod(NamedTuple): class WorkerThread(threading.Thread): """Thread class used for services running in a dedicated thread.""" - service: 'ServiceThread' + service: "ServiceThread" is_stopped: threading.Event - def __init__(self, service: 'ServiceThread', **kwargs: Any) -> None: + def __init__(self, service: "ServiceThread", **kwargs: Any) -> None: super().__init__(**kwargs) self.service = service self.daemon = False @@ -93,23 +88,25 @@ class ServiceThread(Service): #: underlying thread to be fully started. wait_for_thread: bool = True - _thread: Optional['WorkerThread'] = None + _thread: Optional["WorkerThread"] = None _thread_started: Event _thread_running: Optional[asyncio.Future] = None last_wakeup_at: float = 0.0 - def __init__(self, - *, - executor: Any = None, - loop: asyncio.AbstractEventLoop = None, - thread_loop: asyncio.AbstractEventLoop = None, - Worker: Type[WorkerThread] = None, - **kwargs: Any) -> None: + def __init__( + self, + *, + executor: Any = None, + loop: asyncio.AbstractEventLoop = None, + thread_loop: asyncio.AbstractEventLoop = None, + Worker: Type[WorkerThread] = None, + **kwargs: Any, + ) -> None: # cannot share loop between threads, so create a new one assert asyncio.get_event_loop() if executor is not None: - raise NotImplementedError('executor argument no longer supported') + raise NotImplementedError("executor argument no longer supported") self.parent_loop = loop or asyncio.get_event_loop() self.thread_loop = thread_loop or asyncio.new_event_loop() self._thread_started = Event(loop=self.parent_loop) @@ -186,9 +183,10 @@ async def _keepalive2(self) -> None: await self.sleep(1.1) if self.last_wakeup_at: if monotonic() - self.last_wakeup_at > 3.0: - self.log.error('Thread keepalive is not responding...') + 
self.log.error("Thread keepalive is not responding...") asyncio.run_coroutine_threadsafe( - self._wakeup_timer_in_thread(), self.thread_loop) + self._wakeup_timer_in_thread(), self.thread_loop + ) async def _wakeup_timer_in_thread(self) -> None: self.last_wakeup_at = monotonic() @@ -201,7 +199,8 @@ async def crash(self, exc: BaseException) -> None: maybe_set_exception(self._thread_running, exc) else: self.parent_loop.call_soon_threadsafe( - maybe_set_exception, self._thread_running, exc) + maybe_set_exception, self._thread_running, exc + ) await super().crash(exc) def _start_thread(self) -> None: @@ -228,13 +227,13 @@ def set_shutdown(self) -> None: self.parent_loop.call_soon_threadsafe(self._shutdown.set) async def _stop_children(self) -> None: - ... # called by thread instead of .stop() + ... # called by thread instead of .stop() async def _stop_futures(self) -> None: - ... # called by thread instead of .stop() + ... # called by thread instead of .stop() async def _stop_exit_stacks(self) -> None: - ... # called by thread instead of .stop() + ... 
# called by thread instead of .stop() async def _shutdown_thread(self) -> None: await self.on_thread_stop() @@ -255,7 +254,7 @@ async def _serve(self) -> None: except asyncio.CancelledError: raise except BaseException as exc: # pylint: disable=broad-except - self.on_crash('{0!r} crashed: {1!r}', self.label, exc) + self.on_crash("{0!r} crashed: {1!r}", self.label, exc) await self.crash(exc) if self.beacon.root is not None: await self.beacon.root.data.crash(exc) @@ -266,9 +265,8 @@ async def _serve(self) -> None: @Service.task async def _thread_keepalive(self) -> None: async for sleep_time in self.itertimer( - 1.0, - name=f'_thread_keepalive-{self.label}', - loop=self.thread_loop): # pragma: no cover + 1.0, name=f"_thread_keepalive-{self.label}", loop=self.thread_loop + ): # pragma: no cover # The consumer thread will have a separate event loop, # and so we use this trick to make sure our loop is # being scheduled to run something at all times. @@ -284,13 +282,12 @@ def on_crash(self, msg: str, *fmt: Any, **kwargs: Any) -> None: class MethodQueueWorker(Service): index: int - method_queue: 'MethodQueue' - mundane_level = 'debug' + method_queue: "MethodQueue" + mundane_level = "debug" - def __init__(self, method_queue: 'MethodQueue', - *, - index: int, - **kwargs: Any) -> None: + def __init__( + self, method_queue: "MethodQueue", *, index: int, **kwargs: Any + ) -> None: self.method_queue = method_queue self.index = index super().__init__(**kwargs) @@ -311,7 +308,7 @@ async def _method_queue_do_work(self) -> None: @property def label(self) -> str: - return f'{type(self).__name__}@{id(self):#x} index={self.index}' + return f"{type(self).__name__}@{id(self):#x} index={self.index}" class MethodQueue(Service): @@ -321,14 +318,11 @@ class MethodQueue(Service): _queue_ready: Event _workers: List[MethodQueueWorker] - mundane_level = 'debug' + mundane_level = "debug" - def __init__(self, - loop: asyncio.AbstractEventLoop, - num_workers: int = 2, - **kwargs: Any) -> None: + def 
__init__(self, num_workers: int = 2, **kwargs: Any) -> None: super().__init__(**kwargs) - self._queue = asyncio.Queue(loop=self.loop) + self._queue = asyncio.Queue() self._queue_ready = Event(loop=self.loop) self.num_workers = num_workers self._workers = [] @@ -345,11 +339,13 @@ async def on_stop(self) -> None: await self.flush() self._workers[:] = [] - async def call(self, - promise: asyncio.Future, - fun: Callable[..., Awaitable], - *args: Any, - **kwargs: Any) -> asyncio.Future: + async def call( + self, + promise: asyncio.Future, + fun: Callable[..., Awaitable], + *args: Any, + **kwargs: Any, + ) -> asyncio.Future: method = QueuedMethod(promise, fun, args, kwargs) self.loop.call_soon_threadsafe(self._queue_put, method) return promise @@ -358,10 +354,9 @@ def _queue_put(self, method: QueuedMethod) -> None: self._queue.put_nowait(method) self._queue_ready.set() - async def cast(self, - fun: Callable[..., Awaitable], - *args: Any, - **kwargs: Any) -> None: + async def cast( + self, fun: Callable[..., Awaitable], *args: Any, **kwargs: Any + ) -> None: promise = self.loop.create_future() method = QueuedMethod(promise, fun, args, kwargs) self._queue.put_nowait(method) @@ -382,16 +377,14 @@ async def _process_enqueued(self, p: QueuedMethod) -> asyncio.Future: try: result = await maybe_async(method(*args, **kwargs)) except BaseException as exc: - promise._loop.call_soon_threadsafe( - maybe_set_exception, promise, exc) + promise._loop.call_soon_threadsafe(maybe_set_exception, promise, exc) else: - promise._loop.call_soon_threadsafe( - maybe_set_result, promise, result) + promise._loop.call_soon_threadsafe(maybe_set_result, promise, result) return promise @property def label(self) -> str: - return f'{type(self).__name__}@{id(self):#x}' + return f"{type(self).__name__}@{id(self):#x}" class QueueServiceThread(ServiceThread): @@ -421,24 +414,23 @@ async def on_thread_stop(self) -> None: if self._method_queue is not None: await self._method_queue.stop() - async def 
call_thread(self, - fun: Callable[..., Awaitable], - *args: Any, - **kwargs: Any) -> Any: + async def call_thread( + self, fun: Callable[..., Awaitable], *args: Any, **kwargs: Any + ) -> Any: # Enqueue method to be called by thread (synchronous). # We pass a future to the thread, so that when the call is done # the thread will call `future.set_result(result)`. promise = await self.method_queue.call( - self.parent_loop.create_future(), fun, *args, **kwargs) + self.parent_loop.create_future(), fun, *args, **kwargs + ) # wait for the promise to be fulfilled result = await promise return result - async def cast_thread(self, - fun: Callable[..., Awaitable], - *args: Any, - **kwargs: Any) -> None: + async def cast_thread( + self, fun: Callable[..., Awaitable], *args: Any, **kwargs: Any + ) -> None: # Enqueue method to be called by thread (asynchronous). await self.method_queue.cast(fun, *args, **kwargs) diff --git a/mode/timers.py b/mode/timers.py index 290dcdd0..ef38d670 100644 --- a/mode/timers.py +++ b/mode/timers.py @@ -3,10 +3,11 @@ from itertools import count from time import perf_counter from typing import AsyncIterator, Awaitable, Callable, Iterator + from .utils.logging import get_logger from .utils.times import Seconds, want_seconds -__all__ = ['Timer'] +__all__ = ["Timer"] MAX_DRIFT_PERCENT: float = 0.30 MAX_DRIFT_CEILING: float = 1.2 @@ -31,11 +32,15 @@ class Timer: last_yield_at: float iteration: int - def __init__(self, interval: Seconds, *, - max_drift_correction: float = 0.1, - name: str = '', - clock: ClockArg = perf_counter, - sleep: SleepArg = asyncio.sleep) -> None: + def __init__( + self, + interval: Seconds, + *, + max_drift_correction: float = 0.1, + name: str = "", + clock: ClockArg = perf_counter, + sleep: SleepArg = asyncio.sleep + ) -> None: self.interval = interval self.max_drift_correction = max_drift_correction self.name = name @@ -112,30 +117,44 @@ def tick(self) -> float: if drift < 0: self.drifting_late += 1 logger.info( - 'Timer %s woke 
up too late, with a drift of +%r ' - 'runtime=%r sleeptime=%r', - self.name, abs(drift), - time_spent_yielding, time_spent_sleeping) + "Timer %s woke up too late, with a drift of +%r " + "runtime=%r sleeptime=%r", + self.name, + abs(drift), + time_spent_yielding, + time_spent_sleeping, + ) else: self.drifting_early += 1 logger.info( - 'Timer %s woke up too early, with a drift of -%r ' - 'runtime=%r sleeptime=%r', - self.name, abs(drift), - time_spent_yielding, time_spent_sleeping) + "Timer %s woke up too early, with a drift of -%r " + "runtime=%r sleeptime=%r", + self.name, + abs(drift), + time_spent_yielding, + time_spent_sleeping, + ) else: logger.debug( - 'Timer %s woke up - iteration=%r ' - 'time_spent_sleeping=%r drift=%r ' - 'new_interval=%r since_epoch=%r', - self.name, self.iteration, - time_spent_sleeping, drift, new_interval, since_epoch) + "Timer %s woke up - iteration=%r " + "time_spent_sleeping=%r drift=%r " + "new_interval=%r since_epoch=%r", + self.name, + self.iteration, + time_spent_sleeping, + drift, + new_interval, + since_epoch, + ) if time_spent_yielding > interval_s: self.overlaps += 1 logger.warning( - 'Timer %s is overlapping (interval=%r runtime=%r)', - self.name, self.interval, time_spent_yielding) + "Timer %s is overlapping (interval=%r runtime=%r)", + self.name, + self.interval, + time_spent_yielding, + ) self.iteration += 1 self.last_wakeup_at = now @@ -147,20 +166,20 @@ def on_before_yield(self) -> None: def timer_intervals( # XXX deprecated - interval: Seconds, - max_drift_correction: float = 0.1, - name: str = '', - clock: ClockArg = perf_counter) -> Iterator[float]: + interval: Seconds, + max_drift_correction: float = 0.1, + name: str = "", + clock: ClockArg = perf_counter, +) -> Iterator[float]: """Generate timer sleep times. Note: This function is deprecated, please use :func:`itertimer` instead (this function also sleeps and calculates sleep time correctly.) 
""" - state = Timer(interval, - max_drift_correction=max_drift_correction, - name=name, - clock=clock) + state = Timer( + interval, max_drift_correction=max_drift_correction, name=name, clock=clock + ) for _ in count(): sleep_time = state.tick() state.on_before_yield() # includes callback time. diff --git a/mode/types/__init__.py b/mode/types/__init__.py index 5eaa776d..f0bbb91b 100644 --- a/mode/types/__init__.py +++ b/mode/types/__init__.py @@ -3,7 +3,11 @@ from .supervisors import SupervisorStrategyT __all__ = [ - 'DiagT', 'ServiceT', - 'BaseSignalT', 'SignalHandlerT', 'SignalT', 'SyncSignalT', - 'SupervisorStrategyT', + "DiagT", + "ServiceT", + "BaseSignalT", + "SignalHandlerT", + "SignalT", + "SyncSignalT", + "SupervisorStrategyT", ] diff --git a/mode/types/services.py b/mode/types/services.py index ad4204e2..2d190610 100644 --- a/mode/types/services.py +++ b/mode/types/services.py @@ -21,11 +21,11 @@ from .supervisors import SupervisorStrategyT __all__ = [ - 'DiagT', - 'ServiceT', + "DiagT", + "ServiceT", ] -T = TypeVar('T') +T = TypeVar("T") AsyncFun = Union[Awaitable[T], Coroutine[Any, Any, T]] @@ -37,7 +37,7 @@ class DiagT(abc.ABC): last_transition: MutableMapping[str, float] @abc.abstractmethod - def __init__(self, service: 'ServiceT') -> None: + def __init__(self, service: "ServiceT") -> None: ... @abc.abstractmethod @@ -68,17 +68,17 @@ class ServiceT(AsyncContextManager): supervisor: Optional[SupervisorStrategyT] = None @abc.abstractmethod - def __init__(self, *, - beacon: NodeT = None, - loop: asyncio.AbstractEventLoop = None) -> None: + def __init__( + self, *, beacon: NodeT = None, loop: asyncio.AbstractEventLoop = None + ) -> None: ... @abc.abstractmethod - def add_dependency(self, service: 'ServiceT') -> 'ServiceT': + def add_dependency(self, service: "ServiceT") -> "ServiceT": ... 
@abc.abstractmethod - async def add_runtime_dependency(self, service: 'ServiceT') -> 'ServiceT': + async def add_runtime_dependency(self, service: "ServiceT") -> "ServiceT": ... @abc.abstractmethod diff --git a/mode/types/signals.py b/mode/types/signals.py index aa7f1b7f..8c8396cb 100644 --- a/mode/types/signals.py +++ b/mode/types/signals.py @@ -2,7 +2,6 @@ import abc import asyncio import typing -from weakref import ReferenceType from typing import ( Any, Awaitable, @@ -15,39 +14,41 @@ TypeVar, Union, ) +from weakref import ReferenceType + from mypy_extensions import KwArg, NamedArg, VarArg __all__ = [ - 'BaseSignalT', - 'FilterReceiverMapping', - 'SignalHandlerT', - 'SignalHandlerRefT', - 'SignalT', - 'SyncSignalT', - 'T', - 'T_contra', + "BaseSignalT", + "FilterReceiverMapping", + "SignalHandlerT", + "SignalHandlerRefT", + "SignalT", + "SyncSignalT", + "T", + "T_contra", ] -T = TypeVar('T') -T_contra = TypeVar('T_contra', contravariant=True) +T = TypeVar("T") +T_contra = TypeVar("T_contra", contravariant=True) -signal = None # just here to fix flake8 bug +signal = None # just here to fix flake8 bug SignalHandlerT = Union[ Callable[ - [T, VarArg(), NamedArg('BaseSignalT', name='signal'), KwArg()], + [T, VarArg(), NamedArg("BaseSignalT", name="signal"), KwArg()], None, ], Callable[ - [T, VarArg(), NamedArg('BaseSignalT', name='signal'), KwArg()], + [T, VarArg(), NamedArg("BaseSignalT", name="signal"), KwArg()], Awaitable[None], ], ] if typing.TYPE_CHECKING: SignalHandlerRefT = Union[ - Callable[[], SignalHandlerT], - ReferenceType[SignalHandlerT]] + Callable[[], SignalHandlerT], ReferenceType[SignalHandlerT] + ] else: SignalHandlerRefT = Any @@ -61,21 +62,24 @@ class BaseSignalT(Generic[T]): owner: Optional[Type] @abc.abstractmethod - def __init__(self, *, - name: str = None, - owner: Type = None, - loop: asyncio.AbstractEventLoop = None, - default_sender: Any = None, - receivers: MutableSet[SignalHandlerRefT] = None, - filter_receivers: FilterReceiverMapping = 
None) -> None: + def __init__( + self, + *, + name: str = None, + owner: Type = None, + loop: asyncio.AbstractEventLoop = None, + default_sender: Any = None, + receivers: MutableSet[SignalHandlerRefT] = None, + filter_receivers: FilterReceiverMapping = None + ) -> None: ... @abc.abstractmethod - def clone(self, **kwargs: Any) -> 'BaseSignalT': + def clone(self, **kwargs: Any) -> "BaseSignalT": ... @abc.abstractmethod - def with_default_sender(self, sender: Any = None) -> 'BaseSignalT': + def with_default_sender(self, sender: Any = None) -> "BaseSignalT": ... @abc.abstractmethod @@ -83,10 +87,9 @@ def connect(self, fun: SignalHandlerT, **kwargs: Any) -> Callable: ... @abc.abstractmethod - def disconnect(self, fun: SignalHandlerT, - *, - sender: Any = None, - weak: bool = True) -> None: + def disconnect( + self, fun: SignalHandlerT, *, sender: Any = None, weak: bool = True + ) -> None: ... @@ -94,8 +97,7 @@ class SignalT(BaseSignalT[T]): """Base class for all async signals (using ``async def``).""" @abc.abstractmethod - async def __call__(self, sender: T_contra, - *args: Any, **kwargs: Any) -> None: + async def __call__(self, sender: T_contra, *args: Any, **kwargs: Any) -> None: ... @abc.abstractmethod @@ -104,12 +106,12 @@ async def send(self, sender: T_contra, *args: Any, **kwargs: Any) -> None: @typing.no_type_check @abc.abstractmethod - def clone(self, **kwargs: Any) -> 'SignalT': + def clone(self, **kwargs: Any) -> "SignalT": ... @typing.no_type_check @abc.abstractmethod - def with_default_sender(self, sender: Any = None) -> 'SignalT': + def with_default_sender(self, sender: Any = None) -> "SignalT": ... @@ -126,10 +128,10 @@ def send(self, sender: T_contra, *args: Any, **kwargs: Any) -> None: @typing.no_type_check @abc.abstractmethod - def clone(self, **kwargs: Any) -> 'SyncSignalT': + def clone(self, **kwargs: Any) -> "SyncSignalT": ... 
@typing.no_type_check @abc.abstractmethod - def with_default_sender(self, sender: Any = None) -> 'SyncSignalT': + def with_default_sender(self, sender: Any = None) -> "SyncSignalT": ... diff --git a/mode/types/supervisors.py b/mode/types/supervisors.py index 151c3de0..1b2cdfc2 100644 --- a/mode/types/supervisors.py +++ b/mode/types/supervisors.py @@ -2,14 +2,18 @@ import abc import typing from typing import Any, Awaitable, Callable, Optional, Type + from mode.utils.times import Seconds if typing.TYPE_CHECKING: from .services import ServiceT else: - class ServiceT: ... # noqa: E701 -__all__ = ['SupervisorStrategyT'] + class ServiceT: + ... # noqa: E701 + + +__all__ = ["SupervisorStrategyT"] ReplacementT = Callable[[ServiceT, int], Awaitable[ServiceT]] @@ -22,13 +26,15 @@ class SupervisorStrategyT(ServiceT): raises: Type[BaseException] @abc.abstractmethod - def __init__(self, - *services: ServiceT, - max_restarts: Seconds = 100.0, - over: Seconds = 1.0, - raises: Type[BaseException] = None, - replacement: ReplacementT = None, - **kwargs: Any) -> None: + def __init__( + self, + *services: ServiceT, + max_restarts: Seconds = 100.0, + over: Seconds = 1.0, + raises: Type[BaseException] = None, + replacement: ReplacementT = None, + **kwargs: Any + ) -> None: self.replacement: Optional[ReplacementT] = replacement @abc.abstractmethod diff --git a/mode/utils/_py37_contextlib.py b/mode/utils/_py37_contextlib.py index 98081e75..800765b1 100644 --- a/mode/utils/_py37_contextlib.py +++ b/mode/utils/_py37_contextlib.py @@ -2,7 +2,6 @@ import abc import sys import types -import _collections_abc from collections import deque from contextlib import AbstractContextManager from functools import wraps @@ -18,14 +17,17 @@ Union, cast, ) + +import _collections_abc + from .typing import AsyncContextManager, Deque __all__ = [ - 'AbstractAsyncContextManager', - 'AsyncExitStack', - 'ExitStack', - 'asynccontextmanager', - 'nullcontext', + "AbstractAsyncContextManager", + "AsyncExitStack", + 
"ExitStack", + "asynccontextmanager", + "nullcontext", ] AsyncCallable = Callable[..., Awaitable] @@ -36,23 +38,26 @@ class AbstractAsyncContextManager(abc.ABC): """An abstract base class for asynchronous context managers.""" - async def __aenter__(self) -> 'AbstractAsyncContextManager': + async def __aenter__(self) -> "AbstractAsyncContextManager": """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod - async def __aexit__(self, - exc_type: Type[BaseException], - exc_value: BaseException, - traceback: types.TracebackType) -> None: + async def __aexit__( + self, + exc_type: Type[BaseException], + exc_value: BaseException, + traceback: types.TracebackType, + ) -> None: """Raise any exception triggered within the runtime context.""" return None @classmethod def __subclasshook__(cls, C: Type) -> bool: if cls is AbstractAsyncContextManager: - return cast(bool, _collections_abc._check_methods( - C, '__aenter__', '__aexit__')) + return cast( + bool, _collections_abc._check_methods(C, "__aenter__", "__aexit__") + ) return cast(bool, NotImplemented) @@ -66,7 +71,7 @@ def __init__(self, func: Callable, args: Tuple, kwds: Dict) -> None: self.gen = func(*args, **kwds) self.func, self.args, self.kwds = func, args, kwds # Issue 19330: ensure context manager instances have good docstrings - doc = getattr(func, '__doc__', None) + doc = getattr(func, "__doc__", None) if doc is None: doc = type(self).__doc__ self.__doc__ = doc @@ -84,10 +89,12 @@ async def __aenter__(self) -> Any: except StopAsyncIteration: raise RuntimeError("generator didn't yield") from None - async def __aexit__(self, - typ: Optional[Type[BaseException]], - value: Optional[BaseException], - traceback: Optional[types.TracebackType]) -> None: + async def __aexit__( + self, + typ: Optional[Type[BaseException]], + value: Optional[BaseException], + traceback: Optional[types.TracebackType], + ) -> None: if typ is None: try: await self.gen.__anext__() @@ -125,9 +132,11 @@ async def 
__aexit__(self, def asynccontextmanager(func: Callable) -> Callable[..., AsyncContextManager]: """None.""" + @wraps(func) def helper(*args: Any, **kwds: Any) -> _AsyncGeneratorContextManager: return _AsyncGeneratorContextManager(func, args, kwds) + return helper @@ -137,27 +146,27 @@ class _BaseExitStack: _exit_callbacks: Deque[Tuple[bool, Callable]] @staticmethod - def _create_exit_wrapper(cm: ContextManager, - cm_exit: Callable) -> Callable: - def _exit_wrapper(exc_type: Type, - exc: BaseException, - tb: types.TracebackType) -> Any: + def _create_exit_wrapper(cm: ContextManager, cm_exit: Callable) -> Callable: + def _exit_wrapper( + exc_type: Type, exc: BaseException, tb: types.TracebackType + ) -> Any: return cm_exit(cm, exc_type, exc, tb) + return _exit_wrapper @staticmethod - def _create_cb_wrapper(callback: Callable, - *args: Any, **kwds: Any) -> Callable: - def _exit_wrapper(exc_type: Type, - exc: BaseException, - tb: types.TracebackType) -> None: + def _create_cb_wrapper(callback: Callable, *args: Any, **kwds: Any) -> Callable: + def _exit_wrapper( + exc_type: Type, exc: BaseException, tb: types.TracebackType + ) -> None: callback(*args, **kwds) + return _exit_wrapper def __init__(self) -> None: self._exit_callbacks = deque() - def pop_all(self) -> '_BaseExitStack': + def pop_all(self) -> "_BaseExitStack": """Preserve the context stack by transferring it to a new instance.""" new_stack = type(self)() new_stack._exit_callbacks = self._exit_callbacks @@ -198,8 +207,7 @@ def enter_context(self, cm: ContextManager) -> Any: self._push_cm_exit(cm, _exit) return result - def callback(self, callback: Callable, - *args: Any, **kwds: Any) -> Callable: + def callback(self, callback: Callable, *args: Any, **kwds: Any) -> Callable: """Register an arbitrary callback and arguments. Cannot suppress exceptions. 
@@ -218,8 +226,7 @@ def _push_cm_exit(self, cm: ContextManager, cm_exit: Callable) -> None: _exit_wrapper.__self__ = cm # type: ignore self._push_exit_callback(_exit_wrapper, True) - def _push_exit_callback(self, callback: Callable, - is_sync: bool = True) -> None: + def _push_exit_callback(self, callback: Callable, is_sync: bool = True) -> None: self._exit_callbacks.append((is_sync, callback)) @@ -235,7 +242,7 @@ class ExitStack(_BaseExitStack, AbstractContextManager): # in the list raise an exception. """ - def __enter__(self) -> 'ExitStack': + def __enter__(self) -> "ExitStack": return self def __exit__(self, *exc_details: Any) -> None: @@ -245,8 +252,9 @@ def __exit__(self, *exc_details: Any) -> None: # we were actually nesting multiple with statements frame_exc = sys.exc_info()[1] - def _fix_exception_context(new_exc: BaseException, - old_exc: BaseException) -> None: + def _fix_exception_context( + new_exc: BaseException, old_exc: BaseException + ) -> None: # Context may not be correct, so find the end of the chain while 1: exc_context = new_exc.__context__ @@ -276,8 +284,8 @@ def _fix_exception_context(new_exc: BaseException, new_exc_details = sys.exc_info() # simulate the stack of exceptions by setting the context _fix_exception_context( - new_exc_details[1], # type: ignore - exc_details[1]) + new_exc_details[1], exc_details[1] # type: ignore + ) pending_raise = True exc_details = new_exc_details if pending_raise: @@ -315,20 +323,24 @@ class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager): @staticmethod def _create_async_exit_wrapper( - cm: AsyncContextManager, cm_exit: AsyncCallable) -> AsyncCallable: - async def _exit_wrapper(exc_type: Type, - exc: BaseException, - tb: types.TracebackType) -> Any: + cm: AsyncContextManager, cm_exit: AsyncCallable + ) -> AsyncCallable: + async def _exit_wrapper( + exc_type: Type, exc: BaseException, tb: types.TracebackType + ) -> Any: return await cm_exit(cm, exc_type, exc, tb) + return _exit_wrapper 
@staticmethod def _create_async_cb_wrapper( - callback: AsyncCallable, *args: Any, **kwds: Any) -> AsyncCallable: - async def _exit_wrapper(exc_type: Type, - exc: BaseException, - tb: types.TracebackType) -> None: + callback: AsyncCallable, *args: Any, **kwds: Any + ) -> AsyncCallable: + async def _exit_wrapper( + exc_type: Type, exc: BaseException, tb: types.TracebackType + ) -> None: await callback(*args, **kwds) + return _exit_wrapper async def enter_async_context(self, cm: AsyncContextManager) -> Any: @@ -360,8 +372,9 @@ def push_async_exit(self, exit: AsyncPushArg) -> AsyncPushArg: self._push_async_cm_exit(exit, exit_method) # type: ignore return exit # Allow use as a decorator - def push_async_callback(self, callback: AsyncCallable, - *args: Any, **kwds: Any) -> AsyncCallable: + def push_async_callback( + self, callback: AsyncCallable, *args: Any, **kwds: Any + ) -> AsyncCallable: """Register an arbitrary coroutine function and arguments. Cannot suppress exceptions. @@ -378,15 +391,14 @@ async def aclose(self) -> None: """Immediately unwind the context stack.""" await self.__aexit__(None, None, None) - def _push_async_cm_exit( - self, cm: AsyncContextManager, cm_exit: Callable) -> None: + def _push_async_cm_exit(self, cm: AsyncContextManager, cm_exit: Callable) -> None: # Helper to correctly register coroutine function to __aexit__ # method. 
_exit_wrapper = self._create_async_exit_wrapper(cm, cm_exit) _exit_wrapper.__self__ = cm # type: ignore self._push_exit_callback(_exit_wrapper, False) - async def __aenter__(self) -> 'AsyncExitStack': + async def __aenter__(self) -> "AsyncExitStack": return self async def __aexit__(self, *exc_details: Any) -> Any: @@ -397,7 +409,8 @@ async def __aexit__(self, *exc_details: Any) -> Any: frame_exc = sys.exc_info()[1] def _fix_exception_context( - new_exc: BaseException, old_exc: BaseException) -> None: + new_exc: BaseException, old_exc: BaseException + ) -> None: # Context may not be correct, so find the end of the chain while 1: exc_context = new_exc.__context__ @@ -431,8 +444,8 @@ def _fix_exception_context( new_exc_details = sys.exc_info() # simulate the stack of exceptions by setting the context _fix_exception_context( - new_exc_details[1], # type: ignore - exc_details[1]) + new_exc_details[1], exc_details[1] # type: ignore + ) pending_raise = True exc_details = new_exc_details if pending_raise: @@ -464,8 +477,10 @@ def __init__(self, enter_result: Any = None) -> None: def __enter__(self) -> Any: return self.enter_result - def __exit__(self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[types.TracebackType]) -> None: + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[types.TracebackType], + ) -> None: pass diff --git a/mode/utils/aiter.py b/mode/utils/aiter.py index 5118dd88..8895de22 100644 --- a/mode/utils/aiter.py +++ b/mode/utils/aiter.py @@ -16,20 +16,21 @@ ) __all__ = [ - 'aenumerate', - 'aiter', - 'alist', - 'anext', - 'arange', - 'aslice', - 'chunks', + "aenumerate", + "aiter", + "alist", + "anext", + "arange", + "aslice", + "chunks", ] -T = TypeVar('T') +T = TypeVar("T") -async def aenumerate(it: AsyncIterable[T], - start: int = 0) -> AsyncIterator[Tuple[int, T]]: +async def aenumerate( + it: AsyncIterable[T], start: int = 0 +) -> 
AsyncIterator[Tuple[int, T]]: """``async for`` version of ``enumerate``.""" i = start async for item in it: @@ -53,7 +54,7 @@ async def __anext__(self) -> T: raise StopAsyncIteration() from exc def __repr__(self) -> str: - return f'<{type(self).__name__}: {self._it}>' + return f"<{type(self).__name__}: {self._it}>" @singledispatch @@ -64,7 +65,7 @@ def aiter(it: Any) -> AsyncIterator[T]: If the object is already an iterator, the iterator should return self when ``__aiter__`` is called. """ - raise TypeError(f'{it!r} object is not an iterable') + raise TypeError(f"{it!r} object is not an iterable") # XXX In Py3.7: register cannot take typing.AsyncIterator @@ -95,8 +96,7 @@ async def anext(it: AsyncIterator[T], *default: Optional[T]) -> T: class _ARangeIterator(AsyncIterator[int]): - - def __init__(self, parent: 'arange', it: Iterator[int]) -> None: + def __init__(self, parent: "arange", it: Iterator[int]) -> None: self.parent = arange self.it = it @@ -113,9 +113,9 @@ async def __anext__(self) -> int: class arange(AsyncIterable[int]): """Async generator that counts like :class:`range`.""" - def __init__(self, - *slice_args: Optional[int], - **slice_kwargs: Optional[int]) -> None: + def __init__( + self, *slice_args: Optional[int], **slice_kwargs: Optional[int] + ) -> None: s = slice(*slice_args, **slice_kwargs) self.start = s.start or 0 self.stop = s.stop diff --git a/mode/utils/collections.py b/mode/utils/collections.py index 52c2fda0..1636108c 100644 --- a/mode/utils/collections.py +++ b/mode/utils/collections.py @@ -48,28 +48,33 @@ try: from django.utils.functional import LazyObject, LazySettings except ImportError: - class LazyObject: ... # noqa - class LazySettings: ... # noqa + + class LazyObject: + ... # noqa + + class LazySettings: + ... 
# noqa + __all__ = [ - 'Heap', - 'FastUserDict', - 'FastUserSet', - 'FastUserList', - 'LRUCache', - 'ManagedUserDict', - 'ManagedUserSet', - 'AttributeDict', - 'AttributeDictMixin', - 'DictAttribute', - 'force_mapping', + "Heap", + "FastUserDict", + "FastUserSet", + "FastUserList", + "LRUCache", + "ManagedUserDict", + "ManagedUserSet", + "AttributeDict", + "AttributeDictMixin", + "DictAttribute", + "force_mapping", ] -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -KT = TypeVar('KT') -VT = TypeVar('VT') -_S = TypeVar('_S') +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +KT = TypeVar("KT") +VT = TypeVar("VT") +_S = TypeVar("_S") _Setlike = Union[AbstractSet[T], Iterable[T]] @@ -90,7 +95,8 @@ def pop(self, index: int = 0) -> T: return heappop(self.data) else: raise NotImplementedError( - 'Heap can only pop index 0, please use h.data.pop(index)') + "Heap can only pop index 0, please use h.data.pop(index)" + ) def push(self, item: T) -> None: """Push item onto heap, maintaining the heap invariant.""" @@ -189,15 +195,13 @@ class FastUserDict(MutableMapping[KT, VT]): data: MutableMapping[KT, VT] @classmethod - def fromkeys(cls, - iterable: Iterable[KT], - value: VT = None) -> 'FastUserDict': + def fromkeys(cls, iterable: Iterable[KT], value: VT = None) -> "FastUserDict": d = cls() d.update({k: value for k in iterable}) return d def __getitem__(self, key: KT) -> VT: - if not hasattr(self, '__missing__'): + if not hasattr(self, "__missing__"): return self.data[key] if key in self.data: return self.data[key] @@ -322,28 +326,26 @@ def issuperset(self, other: AbstractSet[T]) -> bool: return self.data.issuperset(other) # type: ignore def symmetric_difference(self, other: _Setlike[T]) -> MutableSet[T]: - return cast( - MutableSet, - self.data.symmetric_difference(other)) # type: ignore + return cast(MutableSet, self.data.symmetric_difference(other)) # type: ignore def union(self, other: _Setlike[T]) -> MutableSet[T]: return cast(MutableSet, 
self.data.union(other)) # type: ignore # -- Mutable Methods -- - def __iand__(self, other: AbstractSet[Any]) -> 'FastUserSet': + def __iand__(self, other: AbstractSet[Any]) -> "FastUserSet": self.data.__iand__(other) return self - def __ior__(self, other: AbstractSet[_S]) -> 'FastUserSet': + def __ior__(self, other: AbstractSet[_S]) -> "FastUserSet": self.data.__ior__(other) return self - def __isub__(self, other: AbstractSet[Any]) -> 'FastUserSet[T]': + def __isub__(self, other: AbstractSet[Any]) -> "FastUserSet[T]": self.data.__isub__(other) return self - def __ixor__(self, other: AbstractSet[_S]) -> 'FastUserSet': + def __ixor__(self, other: AbstractSet[_S]) -> "FastUserSet": self.data.__ixor__(other) return self @@ -380,7 +382,6 @@ class FastUserList(UserList): class MappingViewProxy(Generic[KT, VT]): - @abc.abstractmethod def _keys(self) -> Iterator[KT]: ... @@ -395,7 +396,6 @@ def _items(self) -> Iterator[Tuple[KT, VT]]: class ProxyKeysView(KeysView[KT]): - def __init__(self, mapping: MappingViewProxy[KT, Any]) -> None: self._mapping: MappingViewProxy[KT, Any] = mapping @@ -404,7 +404,6 @@ def __iter__(self) -> Iterator[KT]: class ProxyValuesView(ValuesView[VT]): - def __init__(self, mapping: MappingViewProxy[Any, VT]) -> None: self._mapping: MappingViewProxy[Any, VT] = mapping @@ -413,7 +412,6 @@ def __iter__(self) -> Iterator[VT]: class ProxyItemsView(ItemsView): - def __init__(self, mapping: MappingViewProxy) -> None: self._mapping = mapping @@ -438,9 +436,7 @@ class LRUCache(FastUserDict, MutableMapping[KT, VT], MappingViewProxy): _mutex: ContextManager data: OrderedDict - def __init__(self, limit: int = None, - *, - thread_safety: bool = False) -> None: + def __init__(self, limit: int = None, *, thread_safety: bool = False) -> None: self.limit = limit self.thread_safety = thread_safety self._mutex = self._new_lock() @@ -519,7 +515,7 @@ def _new_lock(self) -> ContextManager: def __getstate__(self) -> Mapping[str, Any]: d = dict(vars(self)) - 
d.pop('_mutex') + d.pop("_mutex") return d def __setstate__(self, state: Dict[str, Any]) -> None: @@ -564,7 +560,7 @@ def pop(self) -> T: def raw_update(self, *args: Any, **kwargs: Any) -> None: self.data.update(*args, **kwargs) # type: ignore - def __iand__(self, other: AbstractSet[Any]) -> 'FastUserSet': + def __iand__(self, other: AbstractSet[Any]) -> "FastUserSet": self.on_change( added=set(), removed=cast(Set, self).difference(other), @@ -572,7 +568,7 @@ def __iand__(self, other: AbstractSet[Any]) -> 'FastUserSet': self.data.__iand__(other) return self - def __ior__(self, other: AbstractSet[_S]) -> 'FastUserSet': + def __ior__(self, other: AbstractSet[_S]) -> "FastUserSet": self.on_change( added=cast(Set, other).difference(self), removed=set(), @@ -580,7 +576,7 @@ def __ior__(self, other: AbstractSet[_S]) -> 'FastUserSet': self.data.__ior__(other) return self - def __isub__(self, other: AbstractSet[Any]) -> 'FastUserSet': + def __isub__(self, other: AbstractSet[Any]) -> "FastUserSet": self.on_change( added=set(), removed=cast(Set, self.data).intersection(other), @@ -588,7 +584,7 @@ def __isub__(self, other: AbstractSet[Any]) -> 'FastUserSet': self.data.__isub__(other) return self - def __ixor__(self, other: AbstractSet[_S]) -> 'FastUserSet': + def __ixor__(self, other: AbstractSet[_S]) -> "FastUserSet": self.on_change( added=cast(Set, other).difference(self.data), removed=cast(Set, self.data).intersection(other), @@ -688,7 +684,8 @@ def __getattr__(self, k: str) -> Any: return cast(Mapping, self)[k] except KeyError: raise AttributeError( - f'{type(self).__name__!r} object has no attribute {k!r}') + f"{type(self).__name__!r} object has no attribute {k!r}" + ) def __setattr__(self, key: str, value: Any) -> None: """`d[key] = value -> d.key = value`.""" @@ -709,7 +706,7 @@ class DictAttribute(MutableMapping[str, VT], MappingViewProxy): obj: Any = None def __init__(self, obj: Any) -> None: - object.__setattr__(self, 'obj', obj) + object.__setattr__(self, "obj", 
obj) def __getattr__(self, key: Any) -> Any: return getattr(self.obj, key) @@ -763,6 +760,8 @@ def _items(self) -> Iterator[Tuple[str, VT]]: obj = self.obj for key in self: yield key, getattr(obj, key) + + collections.abc.MutableMapping.register(DictAttribute) # noqa: E305 diff --git a/mode/utils/compat.py b/mode/utils/compat.py index 05bd36af..0fbbb8a8 100644 --- a/mode/utils/compat.py +++ b/mode/utils/compat.py @@ -1,23 +1,22 @@ """Compatibility utilities.""" -from typing import AnyStr, IO +from typing import IO, AnyStr from .contexts import asyncnullcontext, nullcontext from .futures import current_task from .typing import AsyncContextManager, ChainMap, Counter, Deque, NoReturn - __all__ = [ - 'AsyncContextManager', # XXX moved to .typing - 'ChainMap', # XXX moved to .typing - 'Counter', # XXX moved to .typing - 'Deque', # XXX moved to .typing - 'NoReturn', # XXX moved to .typing - 'DummyContext', - 'OrderedDict', - 'want_bytes', - 'want_str', - 'isatty', - 'current_task', # XXX moved to .futures + "AsyncContextManager", # XXX moved to .typing + "ChainMap", # XXX moved to .typing + "Counter", # XXX moved to .typing + "Deque", # XXX moved to .typing + "NoReturn", # XXX moved to .typing + "DummyContext", + "OrderedDict", + "want_bytes", + "want_str", + "isatty", + "current_task", # XXX moved to .futures ] #: Dictionaries are ordered by default in Python 3.6 @@ -52,4 +51,5 @@ def isatty(fh: IO) -> bool: class DummyContext(nullcontext, asyncnullcontext): """Context for with-statement doing nothing.""" + # XXX deprecated, use nullcontext or asyncnullcontext diff --git a/mode/utils/contexts.py b/mode/utils/contexts.py index 53d9c98e..e2ccc3ec 100644 --- a/mode/utils/contexts.py +++ b/mode/utils/contexts.py @@ -4,35 +4,44 @@ from typing import Any, Type if typing.TYPE_CHECKING: - from ._py37_contextlib import AbstractAsyncContextManager - from ._py37_contextlib import AsyncExitStack, ExitStack - from ._py37_contextlib import asynccontextmanager - from ._py37_contextlib 
import nullcontext + from ._py37_contextlib import ( + AbstractAsyncContextManager, + AsyncExitStack, + ExitStack, + asynccontextmanager, + nullcontext, + ) else: try: # pragma: no cover - from contextlib import AbstractAsyncContextManager - from contextlib import AsyncExitStack, ExitStack - from contextlib import asynccontextmanager - from contextlib import nullcontext + from contextlib import ( + AbstractAsyncContextManager, + AsyncExitStack, + ExitStack, + asynccontextmanager, + nullcontext, + ) except ImportError: # pragma: no cover - from ._py37_contextlib import AbstractAsyncContextManager - from ._py37_contextlib import AsyncExitStack, ExitStack - from ._py37_contextlib import asynccontextmanager - from ._py37_contextlib import nullcontext + from ._py37_contextlib import ( + AbstractAsyncContextManager, + AsyncExitStack, + ExitStack, + asynccontextmanager, + nullcontext, + ) __all__ = [ - 'AbstractAsyncContextManager', - 'AsyncExitStack', - 'ExitStack', - 'asynccontextmanager', - 'nullcontext', - 'asyncnullcontext', + "AbstractAsyncContextManager", + "AsyncExitStack", + "ExitStack", + "asynccontextmanager", + "nullcontext", + "asyncnullcontext", ] # Sphinx complains that stdlib is badly formatted :P -AsyncExitStack.__doc__ = ''' +AsyncExitStack.__doc__ = """ Async context manager for dynamic management of a stack of exit callbacks. @@ -43,9 +52,9 @@ ... # All opened connections will automatically be released at the ... # end of the async with statement, even if attempts to open a ... # connection later in the list raise an exception. -''' -asynccontextmanager.__doc__ = 'asynccontextmanager decorator.' -nullcontext.__doc__ = 'Context that does nothing.' +""" +asynccontextmanager.__doc__ = "asynccontextmanager decorator." +nullcontext.__doc__ = "Context that does nothing." 
class asyncnullcontext(AbstractAsyncContextManager): @@ -59,8 +68,10 @@ def __init__(self, enter_result: Any = None) -> None: async def __aenter__(self) -> Any: return self.enter_result - async def __aexit__(self, - exc_type: Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: TracebackType = None) -> None: + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: TracebackType = None, + ) -> None: ... diff --git a/mode/utils/cron.py b/mode/utils/cron.py new file mode 100644 index 00000000..a177ceca --- /dev/null +++ b/mode/utils/cron.py @@ -0,0 +1,18 @@ +"""Crontab Utilities.""" +import time +from datetime import datetime, tzinfo +from typing import cast + +from croniter.croniter import croniter + + +def secs_for_next(cron_format: str, tz: tzinfo = None) -> float: + """Return seconds until next execution given Crontab style format.""" + now_ts = time.time() + # If we have a tz object we'll make now timezone aware, and + # if not will set now to be the current timestamp (tz + # unaware) + # If we have tz, now will be a datetime, if not an integer + now = tz and datetime.now(tz) or now_ts + cron_it = croniter(cron_format, start_time=now) + return cast(float, cron_it.get_next(float)) - now_ts diff --git a/mode/utils/futures.py b/mode/utils/futures.py index 0f8e45af..0775c77e 100644 --- a/mode/utils/futures.py +++ b/mode/utils/futures.py @@ -10,36 +10,39 @@ try: # pragma: no cover from asyncio import all_tasks # type: ignore except ImportError: # pragma: no cover - def all_tasks( - loop: asyncio.AbstractEventLoop) -> Set[asyncio.Task]: # noqa + + def all_tasks(loop: asyncio.AbstractEventLoop) -> Set[asyncio.Task]: # noqa return asyncio.Task.all_tasks(loop=loop) + try: # pragma: no cover from asyncio import current_task # type: ignore except ImportError: # pragma: no cover current_task = asyncio.Task.current_task __all__ = [ - 'all_tasks', - 'current_task', - 'done_future', - 'maybe_async', - 
'maybe_cancel', - 'maybe_set_exception', - 'maybe_set_result', - 'stampede', - 'notify', + "all_tasks", + "current_task", + "done_future", + "maybe_async", + "maybe_cancel", + "maybe_set_exception", + "maybe_set_result", + "stampede", + "notify", ] class StampedeWrapper: fut: Optional[asyncio.Future] = None - def __init__(self, - fun: Callable, - *args: Any, - loop: asyncio.AbstractEventLoop = None, - **kwargs: Any) -> None: + def __init__( + self, + fun: Callable, + *args: Any, + loop: asyncio.AbstractEventLoop = None, + **kwargs: Any + ) -> None: self.fun = fun self.args = args self.kwargs = kwargs @@ -115,8 +118,9 @@ def __get__(self, obj: Any, type: Type = None) -> Any: return w -def done_future(result: Any = None, *, - loop: asyncio.AbstractEventLoop = None) -> asyncio.Future: +def done_future( + result: Any = None, *, loop: asyncio.AbstractEventLoop = None +) -> asyncio.Future: """Return :class:`asyncio.Future` that is already evaluated.""" f = (loop or asyncio.get_event_loop()).create_future() f.set_result(result) @@ -142,8 +146,7 @@ def maybe_cancel(fut: Optional[asyncio.Future]) -> bool: return False -def maybe_set_exception(fut: Optional[asyncio.Future], - exc: BaseException) -> bool: +def maybe_set_exception(fut: Optional[asyncio.Future], exc: BaseException) -> bool: """Set future exception if not already done.""" if fut is not None and not fut.done(): fut.set_exception(exc) @@ -151,8 +154,7 @@ def maybe_set_exception(fut: Optional[asyncio.Future], return False -def maybe_set_result(fut: Optional[asyncio.Future], - result: Any) -> bool: +def maybe_set_result(fut: Optional[asyncio.Future], result: Any) -> bool: """Set future result if not already done.""" if fut is not None and not fut.done(): fut.set_result(result) diff --git a/mode/utils/graphs/__init__.py b/mode/utils/graphs/__init__.py index 1d4f349f..09b2c682 100644 --- a/mode/utils/graphs/__init__.py +++ b/mode/utils/graphs/__init__.py @@ -1,4 +1,4 @@ from .formatter import GraphFormatter from 
.graph import DependencyGraph -__all__ = ['DependencyGraph', 'GraphFormatter'] +__all__ = ["DependencyGraph", "GraphFormatter"] diff --git a/mode/utils/graphs/formatter.py b/mode/utils/graphs/formatter.py index 8371548d..be553840 100644 --- a/mode/utils/graphs/formatter.py +++ b/mode/utils/graphs/formatter.py @@ -2,15 +2,15 @@ from typing import Any, Mapping from mode.utils.objects import label -from mode.utils.types.graphs import GraphFormatterT, _T +from mode.utils.types.graphs import _T, GraphFormatterT def dedent_initial(s: str, n: int = 4) -> str: """Remove identation from first line of text.""" - return s[n:] if s[:n] == ' ' * n else s + return s[n:] if s[:n] == " " * n else s -def dedent(s: str, n: int = 4, sep: str = '\n') -> str: +def dedent(s: str, n: int = 4, sep: str = "\n") -> str: """Remove identation.""" return sep.join(dedent_initial(line) for line in s.splitlines()) @@ -18,16 +18,18 @@ def dedent(s: str, n: int = 4, sep: str = '\n') -> str: class DOT: """Constants related to the dot format.""" - HEAD = dedent(''' + HEAD = dedent( + """ {IN}{type} {id} {{ {INp}graph [{attrs}] - ''') - ATTR = '{name}={value}' + """ + ) + ATTR = "{name}={value}" NODE = '{INp}"{0}" [{attrs}]' EDGE = '{INp}"{0}" {dir} "{1}" [{attrs}]' - ATTRSEP = ', ' - DIRS = {'graph': '--', 'digraph': '->'} - TAIL = '{IN}}}' + ATTRSEP = ", " + DIRS = {"graph": "--", "digraph": "->"} + TAIL = "{IN}}}" class GraphFormatter(GraphFormatterT): @@ -42,38 +44,40 @@ class GraphFormatter(GraphFormatterT): _dirs = dict(DOT.DIRS) scheme: Mapping[str, Any] = { - 'shape': 'box', - 'arrowhead': 'vee', - 'style': 'filled', - 'fontname': 'HelveticaNeue', + "shape": "box", + "arrowhead": "vee", + "style": "filled", + "fontname": "HelveticaNeue", } edge_scheme: Mapping[str, Any] = { - 'color': 'darkseagreen4', - 'arrowcolor': 'black', - 'arrowsize': 0.7, + "color": "darkseagreen4", + "arrowcolor": "black", + "arrowsize": 0.7, } node_scheme: Mapping[str, Any] = { - 'fillcolor': 'palegreen3', - 'color': 
'palegreen4', + "fillcolor": "palegreen3", + "color": "palegreen4", } term_scheme: Mapping[str, Any] = { - 'fillcolor': 'palegreen1', - 'color': 'palegreen2', + "fillcolor": "palegreen1", + "color": "palegreen2", } graph_scheme: Mapping[str, Any] = { - 'bgcolor': 'mintcream', + "bgcolor": "mintcream", } - def __init__(self, - root: Any = None, - type: str = None, - id: str = None, - indent: int = 0, - inw: str = ' ' * 4, - **scheme: Any) -> None: - self.id = id or 'dependencies' + def __init__( + self, + root: Any = None, + type: str = None, + id: str = None, + indent: int = 0, + inw: str = " " * 4, + **scheme: Any, + ) -> None: + self.id = id or "dependencies" self.root = root - self.type = type or 'digraph' + self.type = type or "digraph" self.direction = self._dirs[self.type] self.IN = inw * (indent or 0) self.INp = self.IN + inw @@ -86,13 +90,13 @@ def attr(self, name: str, value: Any) -> str: def attrs(self, d: Mapping = None, scheme: Mapping = None) -> str: scheme = {**self.scheme, **scheme} if scheme else self.scheme d = {**scheme, **d} if d else scheme - return self._attrsep.join( - str(self.attr(k, v)) for k, v in d.items() - ) + return self._attrsep.join(str(self.attr(k, v)) for k, v in d.items()) def head(self, **attrs: Any) -> str: return self.FMT( - self._head, id=self.id, type=self.type, + self._head, + id=self.id, + type=self.type, attrs=self.attrs(attrs, self.graph_scheme), ) @@ -112,23 +116,25 @@ def edge(self, a: _T, b: _T, **attrs: Any) -> str: return self.draw_edge(a, b, **attrs) def _enc(self, s: str) -> str: - return s.encode('utf-8', 'ignore').decode() + return s.encode("utf-8", "ignore").decode() def FMT(self, fmt: str, *args: Any, **kwargs: Any) -> str: - return self._enc(fmt.format( - *args, **dict(kwargs, IN=self.IN, INp=self.INp))) + return self._enc(fmt.format(*args, **dict(kwargs, IN=self.IN, INp=self.INp))) - def draw_edge(self, a: _T, b: _T, - scheme: Mapping = None, - attrs: Mapping = None) -> str: + def draw_edge( + self, a: _T, b: 
_T, scheme: Mapping = None, attrs: Mapping = None + ) -> str: return self.FMT( - self._edge, self.label(a), self.label(b), - dir=self.direction, attrs=self.attrs(attrs, self.edge_scheme), + self._edge, + self.label(a), + self.label(b), + dir=self.direction, + attrs=self.attrs(attrs, self.edge_scheme), ) - def draw_node(self, obj: _T, - scheme: Mapping = None, - attrs: Mapping = None) -> str: + def draw_node(self, obj: _T, scheme: Mapping = None, attrs: Mapping = None) -> str: return self.FMT( - self._node, self.label(obj), attrs=self.attrs(attrs, scheme), + self._node, + self.label(obj), + attrs=self.attrs(attrs, scheme), ) diff --git a/mode/utils/graphs/graph.py b/mode/utils/graphs/graph.py index eae040e6..ec943695 100644 --- a/mode/utils/graphs/graph.py +++ b/mode/utils/graphs/graph.py @@ -1,9 +1,9 @@ """Data structure: Dependency graph.""" from functools import partial from typing import ( + IO, Any, Callable, - IO, ItemsView, Iterable, Iterator, @@ -14,8 +14,8 @@ cast, ) +from mode.utils.types.graphs import _T, DependencyGraphT, GraphFormatterT from mode.utils.typing import Counter -from mode.utils.types.graphs import DependencyGraphT, GraphFormatterT, _T from .formatter import GraphFormatter @@ -39,9 +39,9 @@ class DependencyGraph(DependencyGraphT): adjacent: MutableMapping - def __init__(self, - it: Iterable = None, - formatter: GraphFormatterT[_T] = None) -> None: + def __init__( + self, it: Iterable = None, formatter: GraphFormatterT[_T] = None + ) -> None: self.formatter = formatter or GraphFormatter() self.adjacent = {} if it is not None: @@ -71,9 +71,7 @@ def topsort(self) -> Sequence: graph = DependencyGraph() components = self._tarjan72() - NC = { - node: component for component in components for node in component - } + NC = {node: component for component in components for node in component} for component in components: graph.add_arc(component) for node in self: @@ -165,9 +163,7 @@ def visit(node: Any) -> None: return result - def to_dot(self, fh: IO, 
- *, - formatter: GraphFormatterT[_T] = None) -> None: + def to_dot(self, fh: IO, *, formatter: GraphFormatterT[_T] = None) -> None: """Convert the graph to DOT format. Arguments: @@ -210,16 +206,13 @@ def items(self) -> ItemsView: return cast(ItemsView, self.adjacent.items()) def __repr__(self) -> str: - return '\n'.join(self._repr_node(N) for N in self) + return "\n".join(self._repr_node(N) for N in self) - def _repr_node(self, obj: _T, - level: int = 1, - fmt: str = '{0}({1})') -> str: + def _repr_node(self, obj: _T, level: int = 1, fmt: str = "{0}({1})") -> str: output = [fmt.format(obj, self.valency_of(obj))] if obj in self: for other in self[obj]: d = fmt.format(other, self.valency_of(other)) - output.append(' ' * level + d) - output.extend( - self._repr_node(other, level + 1).split('\n')[1:]) - return '\n'.join(output) + output.append(" " * level + d) + output.extend(self._repr_node(other, level + 1).split("\n")[1:]) + return "\n".join(output) diff --git a/mode/utils/imports.py b/mode/utils/imports.py index 7441d65b..8a2c4365 100644 --- a/mode/utils/imports.py +++ b/mode/utils/imports.py @@ -34,27 +34,28 @@ try: from yarl import URL except ImportError: # pragma: no cover - class URL: + class URL: def __init__(self, url: str) -> None: - assert '://' in url - self.scheme = url.split('://')[0] + assert "://" in url + self.scheme = url.split("://")[0] + # - these are taken from kombu.utils.imports __all__ = [ - 'FactoryMapping', - 'SymbolArg', - 'symbol_by_name', - 'load_extension_class_names', - 'load_extension_classes', - 'cwd_in_path', - 'import_from_cwd', - 'smart_import', + "FactoryMapping", + "SymbolArg", + "symbol_by_name", + "load_extension_class_names", + "load_extension_classes", + "cwd_in_path", + "import_from_cwd", + "smart_import", ] -_T = TypeVar('_T') -_T_contra = TypeVar('_T_contra', contravariant=True) +_T = TypeVar("_T") +_T_contra = TypeVar("_T_contra", contravariant=True) SymbolArg = Union[_T, str] @@ -106,13 +107,14 @@ def by_name(self, 
name: SymbolArg[_T]) -> _T: return symbol_by_name(name, aliases=self.aliases) except ModuleNotFoundError as exc: name_ = cast(str, name) - if '.' in name_: + if "." in name_: raise alt = didyoumean( - self.aliases, name_, - fmt_none=f'Available choices: {", ".join(self.aliases)}') - raise ModuleNotFoundError( - f'{name!r} is not a valid name. {alt}') from exc + self.aliases, + name_, + fmt_none=f'Available choices: {", ".join(self.aliases)}', + ) + raise ModuleNotFoundError(f"{name!r} is not a valid name. {alt}") from exc def get_alias(self, name: str) -> str: self._maybe_finalize() @@ -128,10 +130,12 @@ def _maybe_finalize(self) -> None: def _finalize(self) -> None: for namespace in self.namespaces: - self.aliases.update({ - name: cls_name - for name, cls_name in load_extension_class_names(namespace) - }) + self.aliases.update( + { + name: cls_name + for name, cls_name in load_extension_class_names(namespace) + } + ) @cached_property def data(self) -> MutableMapping: # type: ignore @@ -139,10 +143,11 @@ def data(self) -> MutableMapping: # type: ignore def _ensure_identifier(path: str, full: str) -> None: - for part in path.split('.'): + for part in path.split("."): if not part.isidentifier(): raise ValueError( - f'Component {part!r} of {full!r} is not a valid identifier') + f"Component {part!r} of {full!r} is not a valid identifier" + ) class ParsedSymbol(NamedTuple): @@ -152,10 +157,13 @@ class ParsedSymbol(NamedTuple): attribute_name: Optional[str] -def parse_symbol(s: str, *, - package: str = None, - strict_separator: str = ':', - relative_separator: str = '.') -> ParsedSymbol: +def parse_symbol( + s: str, + *, + package: str = None, + strict_separator: str = ":", + relative_separator: str = ".", +) -> ParsedSymbol: """Parse :func:`symbol_by_name` argument into components. 
Returns: @@ -180,25 +188,22 @@ def parse_symbol(s: str, *, """ module_name: Optional[str] attribute_name: Optional[str] - partition_by = (strict_separator - if strict_separator in s else relative_separator) + partition_by = strict_separator if strict_separator in s else relative_separator module_name, used_separator, attribute_name = s.rpartition(partition_by) if not module_name: # Module name is missing must be either ".foo" or ":foo", # and is a relative import. - if used_separator == ':': + if used_separator == ":": # ":foo" is illegal and will result in ValueError below. raise ValueError(f'Missing module name with ":" separator: {s!r}') - elif used_separator == '.': + elif used_separator == ".": # ".foo" is legal but requires a ``package`` argument. if not package: - raise ValueError( - f'Relative import {s!r} but package=None (required)') + raise ValueError(f"Relative import {s!r} but package=None (required)") module_name, attribute_name = s, None else: - attribute_name, module_name = ( - None, package if package else attribute_name) + attribute_name, module_name = (None, package if package else attribute_name) if attribute_name: _ensure_identifier(attribute_name, full=s) @@ -209,13 +214,14 @@ def parse_symbol(s: str, *, def symbol_by_name( - name: SymbolArg[_T], - aliases: Mapping[str, str] = None, - imp: Any = None, - package: str = None, - sep: str = '.', - default: _T = None, - **kwargs: Any) -> _T: + name: SymbolArg[_T], + aliases: Mapping[str, str] = None, + imp: Any = None, + package: str = None, + sep: str = ".", + default: _T = None, + **kwargs: Any, +) -> _T: """Get symbol by qualified name. The name should be the full dot-separated path to the class:: @@ -260,16 +266,18 @@ def symbol_by_name( try: try: module = imp( # type: ignore - module_name or '', package=package, + module_name or "", + package=package, # kwargs can be used to extend symbol_by_name when a custom # `imp` function is used. 
# importib does not support additional arguments # beyond (name, package=None), so we have to silence # mypy error here. - **kwargs) + **kwargs, + ) except ValueError as exc: raise ValueError( - f'Cannot import {name!r}: {exc}', + f"Cannot import {name!r}: {exc}", ).with_traceback(sys.exc_info()[2]) if attribute_name: return cast(_T, getattr(module, attribute_name)) @@ -312,14 +320,12 @@ def load_extension_classes(namespace: str) -> Iterable[EntrypointExtension]: try: cls: Type = symbol_by_name(cls_name) except (ImportError, SyntaxError) as exc: - warnings.warn( - f'Cannot load {namespace} extension {cls_name!r}: {exc!r}') + warnings.warn(f"Cannot load {namespace} extension {cls_name!r}: {exc!r}") else: yield EntrypointExtension(name, cls) -def load_extension_class_names( - namespace: str) -> Iterable[RawEntrypointExtension]: +def load_extension_class_names(namespace: str) -> Iterable[RawEntrypointExtension]: """Get setuptools entrypoint extension class names. If the entrypoint is defined in ``setup.py`` as:: @@ -342,7 +348,7 @@ def load_extension_class_names( for ep in iter_entry_points(namespace): yield RawEntrypointExtension( ep.name, - ':'.join([ep.module_name, ep.attrs[0]]), + ":".join([ep.module_name, ep.attrs[0]]), ) @@ -361,10 +367,9 @@ def cwd_in_path() -> Generator: sys.path.remove(cwd) -def import_from_cwd(module: str, - *, - imp: Callable = None, - package: str = None) -> ModuleType: +def import_from_cwd( + module: str, *, imp: Callable = None, package: str = None +) -> ModuleType: """Import module, temporarily including modules in the current directory. Modules located in the current directory has @@ -379,7 +384,7 @@ def import_from_cwd(module: str, def smart_import(path: str, imp: Any = None) -> Any: """Import module if module, otherwise same as :func:`symbol_by_name`.""" imp = importlib.import_module if imp is None else imp - if ':' in path: + if ":" in path: # Path includes attribute so can just jump # here (e.g., ``os.path:abspath``). 
return symbol_by_name(path, imp=imp) diff --git a/mode/utils/locals.py b/mode/utils/locals.py index e60bbfcc..19214517 100644 --- a/mode/utils/locals.py +++ b/mode/utils/locals.py @@ -14,7 +14,7 @@ from contextvars import ContextVar from typing import Generator, Generic, List, Optional, Sequence, TypeVar -__all__ = ['LocalStack'] +__all__ = ["LocalStack"] # LocalStack is a generic type, # so for a stack keeping track of web requests you may define: @@ -23,7 +23,7 @@ # # If the stack is a ``List[T]``, then the type variable T denotes the # type this stack contains. -T = TypeVar('T') +T = TypeVar("T") class LocalStack(Generic[T]): @@ -38,7 +38,7 @@ class LocalStack(Generic[T]): _stack: ContextVar[Optional[List[T]]] def __init__(self) -> None: - self._stack = ContextVar('_stack') + self._stack = ContextVar("_stack") # XXX mypy bug; when fixed type Generator, should be ContextManager. @contextmanager diff --git a/mode/utils/locks.py b/mode/utils/locks.py index 5c42ee46..8b56fc6b 100644 --- a/mode/utils/locks.py +++ b/mode/utils/locks.py @@ -7,6 +7,7 @@ import asyncio from collections import deque from typing import Optional + from .typing import Deque @@ -30,10 +31,10 @@ def __init__(self, *, loop: asyncio.AbstractEventLoop = None) -> None: def __repr__(self) -> str: res = super().__repr__() - extra = 'set' if self._value else 'unset' + extra = "set" if self._value else "unset" if self._waiters: - extra = f'{extra}, waiters:{len(self._waiters)}' - return f'<{res[1:-1]} [{extra}]>' + extra = f"{extra}, waiters:{len(self._waiters)}" + return f"<{res[1:-1]} [{extra}]>" def is_set(self) -> bool: """Return True if and only if the internal flag is true.""" diff --git a/mode/utils/logging.py b/mode/utils/logging.py index 1422f324..96519f08 100644 --- a/mode/utils/logging.py +++ b/mode/utils/logging.py @@ -16,6 +16,7 @@ from time import asctime from types import TracebackType from typing import ( + IO, Any, AnyStr, BinaryIO, @@ -23,7 +24,6 @@ ClassVar, ContextManager, Dict, - 
IO, Iterable, Iterator, List, @@ -37,6 +37,9 @@ Union, cast, ) + +import colorlog + from .contexts import ExitStack from .futures import all_tasks, current_task from .locals import LocalStack @@ -45,90 +48,89 @@ from .tracebacks import format_task_stack, print_task_stack from .typing import Protocol -import colorlog - __all__ = [ - 'CompositeLogger', - 'ExtensionFormatter', - 'FileLogProxy', - 'FormatterHandler', - 'LogSeverityMixin', - 'Logwrapped', - 'Severity', - 'cry', - 'flight_recorder', - 'formatter', - 'formatter2', - 'get_logger', - 'level_name', - 'level_number', - 'redirect_logger', - 'redirect_stdouts', - 'setup_logging', + "CompositeLogger", + "ExtensionFormatter", + "FileLogProxy", + "FormatterHandler", + "LogSeverityMixin", + "Logwrapped", + "Severity", + "cry", + "flight_recorder", + "formatter", + "formatter2", + "get_logger", + "level_name", + "level_number", + "redirect_logger", + "redirect_stdouts", + "setup_logging", ] HAS_STACKLEVEL = sys.version_info >= (3, 8) -DEVLOG: bool = bool(os.environ.get('DEVLOG', '')) -DEFAULT_FORMAT: str = ''' +DEVLOG: bool = bool(os.environ.get("DEVLOG", "")) +DEFAULT_FORMAT: str = """ [%(asctime)s] [%(process)s] [%(levelname)s]: %(message)s %(extra)s -'''.strip() +""".strip() -DEFAULT_COLOR_FORMAT = ''' +DEFAULT_COLOR_FORMAT = """ [%(asctime)s] [%(process)s] [%(levelname)s] %(log_color)s%(message)s %(extra)s -'''.strip() +""".strip() DEFAULT_COLORS = { **colorlog.default_log_colors, - 'INFO': 'white', - 'DEBUG': 'blue', + "INFO": "white", + "DEBUG": "blue", } DEFAULT_FORMATTERS = { - 'default': { - '()': 'mode.utils.logging.DefaultFormatter', - 'format': DEFAULT_FORMAT, + "default": { + "()": "mode.utils.logging.DefaultFormatter", + "format": DEFAULT_FORMAT, }, - 'colored': { - '()': 'mode.utils.logging.ExtensionFormatter', - 'format': DEFAULT_COLOR_FORMAT, - 'log_colors': DEFAULT_COLORS, - 'stream': sys.stdout, + "colored": { + "()": "mode.utils.logging.ExtensionFormatter", + "format": DEFAULT_COLOR_FORMAT, + 
"log_colors": DEFAULT_COLORS, + "stream": sys.stdout, }, } -current_flight_recorder_stack: LocalStack['flight_recorder'] +current_flight_recorder_stack: LocalStack["flight_recorder"] current_flight_recorder_stack = LocalStack() -def current_flight_recorder() -> Optional['flight_recorder']: +def current_flight_recorder() -> Optional["flight_recorder"]: return current_flight_recorder_stack.top -def _logger_config(handlers: List[str], - level: Union[str, int] = 'INFO') -> Dict: +def _logger_config(handlers: List[str], level: Union[str, int] = "INFO") -> Dict: return { - 'handlers': handlers, - 'level': level, + "handlers": handlers, + "level": level, } -def create_logconfig(version: int = 1, - disable_existing_loggers: bool = False, - formatters: Dict = DEFAULT_FORMATTERS, - handlers: Dict = None, - root: Dict = None) -> Dict: +def create_logconfig( + version: int = 1, + disable_existing_loggers: bool = False, + formatters: Dict = DEFAULT_FORMATTERS, + handlers: Dict = None, + root: Dict = None, +) -> Dict: return { - 'version': version, + "version": version, # do not disable existing loggers from other modules. # see https://www.caktusgroup.com/blog/2015/01/27/ # Django-Logging-Configuration-logging_config-default-settings-logger/ - 'disable_existing_loggers': disable_existing_loggers, - 'formatters': formatters, - 'handlers': handlers, - 'root': root, + "disable_existing_loggers": disable_existing_loggers, + "formatters": formatters, + "handlers": handlers, + "root": root, } @@ -151,16 +153,12 @@ def get_logger(name: str) -> Logger: return logger -redirect_logger = get_logger('mode.redirect') +redirect_logger = get_logger("mode.redirect") class HasLog(Protocol): - @abc.abstractmethod - def log(self, - severity: int, - message: str, - *args: Any, **kwargs: Any) -> None: + def log(self, severity: int, message: str, *args: Any, **kwargs: Any) -> None: ... 
@@ -189,50 +187,48 @@ class LogSeverityMixin(LogSeverityMixinBase): def dev(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) if DEVLOG: self.log(logging.INFO, message, *args, **kwargs) def debug(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.DEBUG, message, *args, **kwargs) def info(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.INFO, message, *args, **kwargs) def warn(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.WARN, message, *args, **kwargs) def warning(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.WARN, message, *args, **kwargs) def error(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.ERROR, message, *args, **kwargs) def crit(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.CRITICAL, message, *args, **kwargs) - def critical(self: HasLog, message: str, - *args: Any, **kwargs: Any) -> None: + def critical(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.CRITICAL, message, *args, **kwargs) - def exception(self: HasLog, message: str, - *args: Any, **kwargs: Any) -> None: + def 
exception(self: HasLog, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) self.log(logging.ERROR, message, *args, exc_info=1, **kwargs) @@ -274,22 +270,18 @@ def _format_log(self, severity: int, message: str, logger: Logger - def __init__(self, - logger: Logger, - formatter: Callable[..., str] = None) -> None: + def __init__(self, logger: Logger, formatter: Callable[..., str] = None) -> None: self.logger = logger self.formatter: Optional[Callable[..., str]] = formatter - def log(self, severity: int, message: str, - *args: Any, **kwargs: Any) -> None: + def log(self, severity: int, message: str, *args: Any, **kwargs: Any) -> None: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 2) - self.logger.log(severity, - self.format(severity, message, *args, **kwargs), - *args, **kwargs) + kwargs.setdefault("stacklevel", 2) + self.logger.log( + severity, self.format(severity, message, *args, **kwargs), *args, **kwargs + ) - def format(self, severity: int, message: str, - *args: Any, **kwargs: Any) -> str: + def format(self, severity: int, message: str, *args: Any, **kwargs: Any) -> str: if self.formatter: return self.formatter(severity, message, *args, **kwargs) return message @@ -316,9 +308,7 @@ def formatter2(fun: FormatterHandler2) -> FormatterHandler2: def _format_extra(record: logging.LogRecord) -> str: - return ', '.join( - f'{k}={v!r}' for k, v in record.__dict__.get('data', {}).items() - ) + return ", ".join(f"{k}={v!r}" for k, v in record.__dict__.get("data", {}).items()) class DefaultFormatter(logging.Formatter): @@ -329,7 +319,7 @@ def format(self, record: logging.LogRecord) -> str: return super().format(record) -class ExtensionFormatter(colorlog.TTYColoredFormatter): # type: ignore +class ExtensionFormatter(colorlog.TTYColoredFormatter): """Formatter that can register callbacks to format args. Extends :pypi:`colorlog`. 
@@ -341,15 +331,13 @@ def __init__(self, stream: IO = None, **kwargs: Any) -> None: def format(self, record: logging.LogRecord) -> str: self._format_args(record) record.extra = _format_extra(record) # type: ignore - return cast(str, super().format(record)) + return cast(str, super().format(record)) # type: ignore def _format_args(self, record: logging.LogRecord) -> None: format_arg = self.format_arg if isinstance(record.args, Mapping): # logger.log(severity, "msg %(foo)s", foo=303) - record.args = { - k: format_arg(v, record) for k, v in record.args.items() - } + record.args = {k: format_arg(v, record) for k, v in record.args.items()} else: if not isinstance(record.args, tuple): # logger.log(severity, "msg %s", foo) @@ -357,9 +345,7 @@ def _format_args(self, record: logging.LogRecord) -> None: # always Tuple record.args = (record.args,) # type: ignore # logger.log(severity, "msg %s", ('foo',)) - record.args = tuple( - format_arg(arg, record) for arg in record.args - ) + record.args = tuple(format_arg(arg, record) for arg in record.args) def format_arg(self, arg: Any, record: logging.LogRecord) -> Any: return self._format_arg2(self._format_arg(arg), record) @@ -403,11 +389,12 @@ def _(loglevel: str) -> int: def setup_logging( - *, - loglevel: Union[str, int] = None, - logfile: Union[str, IO] = None, - loghandlers: List[logging.Handler] = None, - logging_config: Dict = None) -> int: + *, + loglevel: Union[str, int] = None, + logfile: Union[str, IO] = None, + loghandlers: List[logging.Handler] = None, + logging_config: Dict = None, +) -> int: """Configure logging subsystem.""" stream: Optional[IO] = None _loglevel: int = level_number(loglevel) @@ -433,42 +420,50 @@ def setup_logging( return _loglevel -def _setup_logging(*, - level: Union[int, str] = None, - filename: str = None, - stream: IO = None, - loghandlers: List[logging.Handler] = None, - logging_config: Dict = None) -> None: +def _setup_logging( + *, + level: Union[int, str] = None, + filename: str = None, + 
stream: IO = None, + loghandlers: List[logging.Handler] = None, + logging_config: Dict = None, +) -> None: handlers = {} if filename: assert stream is None - handlers.update({ - 'default': { - 'level': level, - 'class': 'logging.FileHandler', - 'formatter': 'default', - 'filename': filename, - }, - }) + handlers.update( + { + "default": { + "level": level, + "class": "logging.FileHandler", + "formatter": "default", + "filename": filename, + }, + } + ) elif stream: - handlers.update({ - 'default': { - 'level': level, - 'class': 'colorlog.StreamHandler', - 'formatter': 'colored', - }, - }) - config = create_logconfig(handlers=handlers, root={ - 'level': level, - 'handlers': ['default'], - }) + handlers.update( + { + "default": { + "level": level, + "class": "colorlog.StreamHandler", + "formatter": "colored", + }, + } + ) + config = create_logconfig( + handlers=handlers, + root={ + "level": level, + "handlers": ["default"], + }, + ) if logging_config is None: logging_config = config - elif logging_config.pop('merge', False): + elif logging_config.pop("merge", False): logging_config = {**config, **logging_config} - for k in ('formatters', 'filters', 'handlers', 'loggers', 'root'): - logging_config[k] = {**config.get(k, {}), - **logging_config.get(k, {})} + for k in ("formatters", "filters", "handlers", "loggers", "root"): + logging_config[k] = {**config.get(k, {}), **logging_config.get(k, {})} logging.config.dictConfig(logging_config) if loghandlers is not None: logging.root.handlers.extend(loghandlers) @@ -482,13 +477,11 @@ class Logwrapped(object): severity: int ident: str - _ignore: ClassVar[Set[str]] = {'__enter__', '__exit__'} + _ignore: ClassVar[Set[str]] = {"__enter__", "__exit__"} - def __init__(self, - obj: Any, - logger: Any = None, - severity: Severity = None, - ident: str = '') -> None: + def __init__( + self, obj: Any, logger: Any = None, severity: Severity = None, ident: str = "" + ) -> None: self.obj = obj self.logger = logger self.severity = 
level_number(severity) if severity else logging.WARN @@ -497,25 +490,24 @@ def __init__(self, def __getattr__(self, key: str) -> Any: meth = getattr(self.obj, key) - ignore = object.__getattribute__(self, '_ignore') + ignore = object.__getattribute__(self, "_ignore") if not callable(meth) or key in ignore: return meth @wraps(meth) def __wrapped(*args: Any, **kwargs: Any) -> Any: - info = '' + info = "" if self.ident: info += self.ident.format(self.obj) - info += f'{meth.__name__}(' + info += f"{meth.__name__}(" if args: - info += ', '.join(map(repr, args)) + info += ", ".join(map(repr, args)) if kwargs: if args: - info += ', ' - info += ', '.join(f'{key}={value!r}' - for key, value in kwargs.items()) - info += ')' + info += ", " + info += ", ".join(f"{key}={value!r}" for key, value in kwargs.items()) + info += ")" self.logger.log(self.severity, info) return meth(*args, **kwargs) @@ -528,12 +520,9 @@ def __dir__(self) -> List[str]: return dir(self.obj) -def cry(file: IO, - *, - sep1: str = '=', - sep2: str = '-', - sep3: str = '~', - seplen: int = 49) -> None: # pragma: no cover +def cry( + file: IO, *, sep1: str = "=", sep2: str = "-", sep3: str = "~", seplen: int = 49 +) -> None: # pragma: no cover """Return stack-trace of all active threads. 
See Also: @@ -554,37 +543,37 @@ def cry(file: IO, if thread.ident == current_thread.ident: loop = asyncio.get_event_loop() else: - loop = getattr(thread, 'loop', None) - print(f'THREAD {thread.name}', file=file) # noqa: T003 - print(sep1, file=file) # noqa: T003 + loop = getattr(thread, "loop", None) + print(f"THREAD {thread.name}", file=file) # noqa: T003 + print(sep1, file=file) # noqa: T003 traceback.print_stack(frame, file=file) - print(sep2, file=file) # noqa: T003 - print('LOCAL VARIABLES', file=file) # noqa: T003 - print(sep2, file=file) # noqa: T003 + print(sep2, file=file) # noqa: T003 + print("LOCAL VARIABLES", file=file) # noqa: T003 + print(sep2, file=file) # noqa: T003 pprint(frame.f_locals, stream=file) if loop is not None: - print('TASKS', file=file) + print("TASKS", file=file) print(sep2, file=file) for task in all_tasks(loop=loop): print_task_name(task, file=file) - print(f' {sep3}', file=file) + print(f" {sep3}", file=file) print_task_stack(task, file=file, capture_locals=True) - print('\n', file=file) # noqa: T003 + print("\n", file=file) # noqa: T003 def print_task_name(task: asyncio.Task, file: IO) -> None: """Print name of :class:`asyncio.Task` in tracebacks.""" coro = task._coro # type: ignore - wrapped = getattr(task, '__wrapped__', None) - coro_name = getattr(coro, '__name__', None) + wrapped = getattr(task, "__wrapped__", None) + coro_name = getattr(coro, "__name__", None) if coro_name is None: # some coroutines does not have a __name__ attribute # e.g. 
async_generator_asend coro_name = repr(coro) - print(f' TASK {coro_name}', file=file) + print(f" TASK {coro_name}", file=file) if wrapped: - print(f' -> {wrapped}', file=file) - print(f' {task!r}', file=file) + print(f" -> {wrapped}", file=file) + print(f" {task!r}", file=file) class LogMessage(NamedTuple): @@ -674,9 +663,9 @@ def _background_refresh(self) -> None: _logs: List[LogMessage] _default_context: Dict[str, Any] - def __init__(self, logger: Any, *, - timeout: Seconds, - loop: asyncio.AbstractEventLoop = None) -> None: + def __init__( + self, logger: Any, *, timeout: Seconds, loop: asyncio.AbstractEventLoop = None + ) -> None: self.id = next(self._id_source) self.logger = logger self.timeout = want_seconds(timeout) @@ -705,7 +694,7 @@ def wrap(self, severity: int, obj: Any) -> Logwrapped: def activate(self) -> None: if self._fut: - raise RuntimeError('{type(self).__name__} already activated') + raise RuntimeError("{type(self).__name__} already activated") self.enabled_by = current_task() self.started_at_date = asctime() current_flight_recorder = current_flight_recorder_stack.top @@ -720,17 +709,15 @@ def cancel(self) -> None: if fut is not None: fut.cancel() - def log(self, severity: int, message: str, - *args: Any, **kwargs: Any) -> None: + def log(self, severity: int, message: str, *args: Any, **kwargs: Any) -> None: if self._fut: self._buffer_log(severity, message, args, kwargs) else: if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 2) + kwargs.setdefault("stacklevel", 2) self.logger.log(severity, message, *args, **kwargs) - def _buffer_log(self, severity: int, message: str, - args: Any, kwargs: Any) -> None: + def _buffer_log(self, severity: int, message: str, args: Any, kwargs: Any) -> None: log = LogMessage(severity, message, asctime(), args, kwargs) self._logs.append(log) @@ -746,20 +733,20 @@ def blush(self) -> None: try: logger = self.logger ident = self._ident() - logger.warning('Warning: Task timed out!') - logger.warning( - "Please make sure 
it's hanging before restart.") - logger.info('[%s] (started at %s) Replaying logs...', - ident, self.started_at_date) + logger.warning("Warning: Task timed out!") + logger.warning("Please make sure it's hanging before restart.") + logger.info( + "[%s] (started at %s) Replaying logs...", ident, self.started_at_date + ) self.flush_logs(ident=ident) - logger.info('[%s] -End of log-', ident) - logger.info('[%s] Task traceback', ident) + logger.info("[%s] -End of log-", ident) + logger.info("[%s] Task traceback", ident) if self.enabled_by is not None: logger.info(format_task_stack(self.enabled_by)) else: - logger.info('[%s] -missing-: not enabled by task') + logger.info("[%s] -missing-: not enabled by task") except Exception as exc: - logger.exception('Flight recorder internal error: %r', exc) + logger.exception("Flight recorder internal error: %r", exc) raise def flush_logs(self, ident: str = None) -> None: @@ -771,47 +758,44 @@ def flush_logs(self, ident: str = None) -> None: for sev, message, datestr, args, kwargs in logs: self._fill_extra_context(kwargs) logger.log( - sev, f'[%s] (%s) {message}', ident, datestr, - *args, **kwargs) + sev, f"[%s] (%s) {message}", ident, datestr, *args, **kwargs + ) finally: logs.clear() def _fill_extra_context(self, kwargs: Dict) -> None: if self.extra_context: - extra = kwargs['extra'] = kwargs.get('extra') or {} - extra['data'] = { + extra = kwargs["extra"] = kwargs.get("extra") or {} + extra["data"] = { **self.extra_context, - **(extra.get('data') or {}), + **(extra.get("data") or {}), } def _ident(self) -> str: - return f'{title(type(self).__name__)}-{self.id}' + return f"{title(type(self).__name__)}-{self.id}" def __repr__(self) -> str: - return f'<{self._ident()} @{id(self):#x}>' + return f"<{self._ident()} @{id(self):#x}>" - def __enter__(self) -> 'flight_recorder': + def __enter__(self) -> "flight_recorder": self.activate() - self.exit_stack.enter_context( - current_flight_recorder_stack.push(self)) + 
self.exit_stack.enter_context(current_flight_recorder_stack.push(self)) self.exit_stack.__enter__() return self - def __exit__(self, - exc_type: Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: TracebackType = None) -> Optional[bool]: + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: TracebackType = None, + ) -> Optional[bool]: self.exit_stack.__exit__(exc_type, exc_val, exc_tb) self.cancel() return None class _FlightRecorderProxy(LogSeverityMixin): - - def log(self, - severity: int, - message: str, - *args: Any, **kwargs: Any) -> None: + def log(self, severity: int, message: str, *args: Any, **kwargs: Any) -> None: fl = self.current_flight_recorder() if fl is not None: return fl.log(severity, message, *args, **kwargs) @@ -844,17 +828,16 @@ def _safewrap_handler(self, handler: logging.Handler) -> None: # :data:`sys.__stderr__` instead of :data:`sys.stderr` to circumvent # infinite loops. class WithSafeHandleError(logging.Handler): - def handleError(self, record: logging.LogRecord) -> None: try: traceback.print_exc(None, sys.__stderr__) except IOError: - pass # see python issue 5971 + pass # see python issue 5971 handler.handleError = WithSafeHandleError().handleError # type: ignore def write(self, s: AnyStr) -> int: - if not getattr(self._threadlocal, 'recurse_protection', False): + if not getattr(self._threadlocal, "recurse_protection", False): data = s.strip() if data and not self.closed: self._threadlocal.recurse_protection = True @@ -892,11 +875,11 @@ def flush(self) -> None: @property def mode(self) -> str: - return 'w' + return "w" @property def name(self) -> str: - return '' + return "" def close(self) -> None: self._closed = True @@ -944,21 +927,26 @@ def __iter__(self) -> Iterator[str]: def __next__(self) -> str: raise NotImplementedError() - def __enter__(self) -> 'FileLogProxy': + def __enter__(self) -> "FileLogProxy": return self - def __exit__(self, - exc_type: 
Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: TracebackType = None) -> Optional[bool]: + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: TracebackType = None, + ) -> Optional[bool]: ... @contextmanager -def redirect_stdouts(logger: Logger = redirect_logger, *, - severity: Severity = None, - stdout: bool = True, - stderr: bool = True) -> Iterator[FileLogProxy]: +def redirect_stdouts( + logger: Logger = redirect_logger, + *, + severity: Severity = None, + stdout: bool = True, + stderr: bool = True, +) -> Iterator[FileLogProxy]: """Redirect :data:`sys.stdout` and :data:`sys.stdout` to logger.""" proxy = FileLogProxy(logger, severity=severity) if stdout: diff --git a/mode/utils/loops.py b/mode/utils/loops.py index 723780cc..2790cd13 100644 --- a/mode/utils/loops.py +++ b/mode/utils/loops.py @@ -2,7 +2,7 @@ import asyncio from typing import Any, Callable -__all__ = ['clone_loop', 'call_asap'] +__all__ = ["clone_loop", "call_asap"] def _is_unix_loop(loop: asyncio.AbstractEventLoop) -> bool: @@ -20,47 +20,50 @@ def clone_loop(loop: asyncio.AbstractEventLoop) -> asyncio.AbstractEventLoop: if _is_unix_loop(loop): for signum, handle in loop._signal_handlers.items(): # type: ignore new_loop.add_signal_handler( - signum, _appropriate_signal_handler(loop, handle)) + signum, _appropriate_signal_handler(loop, handle) + ) return new_loop def _appropriate_signal_handler( - parent_loop: asyncio.AbstractEventLoop, - handle: asyncio.Handle) -> Callable: + parent_loop: asyncio.AbstractEventLoop, handle: asyncio.Handle +) -> Callable: callback = handle._callback # type: ignore - context = getattr(handle, '_context', None) # CPython 3.7+ + context = getattr(handle, "_context", None) # CPython 3.7+ callback_args = handle._args def _call_using_parent_loop() -> None: _call_asap(parent_loop, callback, *callback_args, context=context) + return _call_using_parent_loop -def call_asap(callback: Callable, - *args: 
Any, - context: Any = None, - loop: asyncio.AbstractEventLoop = None) -> asyncio.Handle: +def call_asap( + callback: Callable, + *args: Any, + context: Any = None, + loop: asyncio.AbstractEventLoop = None +) -> asyncio.Handle: """Call function asap by pushing at the front of the line.""" assert loop if _is_unix_loop(loop): return _call_asap(loop, callback, *args, context=context) if context is not None: return loop.call_soon_threadsafe( # type: ignore - callback, *args, context=context) + callback, *args, context=context + ) return loop.call_soon_threadsafe(callback, *args) -def _call_asap(loop: Any, - callback: Callable, - *args: Any, - context: Any = None) -> asyncio.Handle: +def _call_asap( + loop: Any, callback: Callable, *args: Any, context: Any = None +) -> asyncio.Handle: loop._check_closed() if loop._debug: - loop._check_callback(callback, 'call_soon_threadsafe') + loop._check_callback(callback, "call_soon_threadsafe") loop._call_soon(callback, args, context) if context is not None: - handle = asyncio.Handle( # type: ignore - callback, list(args), loop, context) + handle = asyncio.Handle(callback, list(args), loop, context) # type: ignore else: handle = asyncio.Handle(callback, list(args), loop) if handle._source_traceback: # type: ignore diff --git a/mode/utils/mocks.py b/mode/utils/mocks.py index eda8ae51..542ab0d7 100644 --- a/mode/utils/mocks.py +++ b/mode/utils/mocks.py @@ -21,19 +21,19 @@ ) __all__ = [ - 'ANY', - 'IN', - 'AsyncMagicMock', - 'AsyncMock', - 'AsyncContextMock', - 'ContextMock', - 'FutureMock', - 'MagicMock', - 'Mock', - 'call', - 'mask_module', - 'patch', - 'patch_module', + "ANY", + "IN", + "AsyncMagicMock", + "AsyncMock", + "AsyncContextMock", + "ContextMock", + "FutureMock", + "MagicMock", + "Mock", + "call", + "mask_module", + "patch", + "patch_module", ] MOCK_CALL_COUNT = count(0) @@ -58,8 +58,8 @@ def __ne__(self, other: Any) -> bool: return other not in self.alternatives def __repr__(self) -> str: - sep = ' | ' - return f'' + sep 
= " | " + return f"" class Mock(unittest.mock.Mock): @@ -92,21 +92,23 @@ class _ContextMock(Mock, ContextManager): in the class, not just the instance. """ - def __enter__(self) -> '_ContextMock': + def __enter__(self) -> "_ContextMock": return self - def __exit__(self, - exc_type: Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: types.TracebackType = None) -> Optional[bool]: + def __exit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: types.TracebackType = None, + ) -> Optional[bool]: pass def ContextMock(*args: Any, **kwargs: Any) -> _ContextMock: """Mock that mocks :keyword:`with` statement contexts.""" obj = _ContextMock(*args, **kwargs) - obj.attach_mock(_ContextMock(), '__enter__') - obj.attach_mock(_ContextMock(), '__exit__') + obj.attach_mock(_ContextMock(), "__enter__") + obj.attach_mock(_ContextMock(), "__exit__") obj.__enter__.return_value = obj # type: ignore # if __exit__ return a value the exception is ignored, # so it must return None here. 
@@ -117,24 +119,20 @@ def ContextMock(*args: Any, **kwargs: Any) -> _ContextMock: class AsyncMock(unittest.mock.Mock): """Mock for ``async def`` function/method or anything awaitable.""" - def __init__(self, *args: Any, - name: str = None, - **kwargs: Any) -> None: + def __init__(self, *args: Any, name: str = None, **kwargs: Any) -> None: super().__init__(name=name) coro = Mock(*args, **kwargs) - self.attach_mock(coro, 'coro') + self.attach_mock(coro, "coro") self.side_effect = coroutine(coro) class AsyncMagicMock(unittest.mock.MagicMock): """A magic mock type for ``async def`` functions/methods.""" - def __init__(self, *args: Any, - name: str = None, - **kwargs: Any) -> None: + def __init__(self, *args: Any, name: str = None, **kwargs: Any) -> None: super().__init__(name=name) coro = MagicMock(*args, **kwargs) - self.attach_mock(coro, 'coro') + self.attach_mock(coro, "coro") self.side_effect = coroutine(coro) @@ -183,11 +181,14 @@ async def test_session(session): assert await response.json() == {'hello': 'json'} """ - def __init__(self, *args: Any, - aenter_return: Any = None, - aexit_return: Any = None, - side_effect: Union[Callable, BaseException] = None, - **kwargs: Any) -> None: + def __init__( + self, + *args: Any, + aenter_return: Any = None, + aexit_return: Any = None, + side_effect: Union[Callable, BaseException] = None, + **kwargs: Any, + ) -> None: super().__init__(*args, **kwargs) self.aenter_return = aenter_return self.aexit_return = aexit_return @@ -237,7 +238,6 @@ def patch_module(*names: str, new_callable: Any = Mock) -> Iterator: prev = {} class MockModule(types.ModuleType): - def __getattr__(self, attr: str) -> Any: setattr(self, attr, new_callable()) return types.ModuleType.__getattribute__(self, attr) @@ -258,7 +258,7 @@ def __getattr__(self, attr: str) -> Any: sys.modules[name] = prev[name] except KeyError: try: - del(sys.modules[name]) + del sys.modules[name] except KeyError: pass @@ -288,7 +288,7 @@ def mask_module(*modnames: str) -> 
Iterator: def myimp(name: str, *args: Any, **kwargs: Any) -> ModuleType: if name in modnames: - raise ImportError(f'No module named {name}') + raise ImportError(f"No module named {name}") else: return cast(ModuleType, realimport(name, *args, **kwargs)) @@ -309,7 +309,7 @@ class _Call(unittest.mock._Call): # but call.__doc__ returns a mocked method and not the class attribute. def __getattr__(self, attr: str) -> Any: - if attr == '__doc__': + if attr == "__doc__": return unittest.mock._Call.__doc__ return super().__getattr__(attr) diff --git a/mode/utils/objects.py b/mode/utils/objects.py index 0f2476de..dcb1a45d 100644 --- a/mode/utils/objects.py +++ b/mode/utils/objects.py @@ -29,7 +29,22 @@ TypeVar, cast, ) -from typing import _eval_type, _type_check # type: ignore + +try: + from typing import _eval_type # type: ignore +except ImportError: + + def _eval_type(t, globalns, localns, recursive_guard=frozenset()): # type: ignore + return t + + +try: + from typing import _type_check # type: ignore +except ImportError: + + def _type_check(arg, msg, is_argument=True, module=None): # type: ignore + return arg + try: from typing import _ClassVar # type: ignore @@ -39,12 +54,16 @@ def _is_class_var(x: Any) -> bool: # noqa return isinstance(x, _GenericAlias) and x.__origin__ is ClassVar + + else: # pragma: no cover # CPython 3.6 def _is_class_var(x: Any) -> bool: return type(x) is _ClassVar + if typing.TYPE_CHECKING: + class ForwardRef: # noqa __forward_arg__: str __forward_evaluated__: bool @@ -53,6 +72,8 @@ class ForwardRef: # noqa def __init__(self, arg: str, is_argument: bool = True) -> None: ... 
+ + else: try: # CPython 3.7 @@ -62,48 +83,50 @@ def __init__(self, arg: str, is_argument: bool = True) -> None: from typing import _ForwardRef as ForwardRef __all__ = [ - 'FieldMapping', - 'DefaultsMapping', - 'Unordered', - 'KeywordReduce', - 'InvalidAnnotation', - 'abc_compatible_with_init_subclass', - 'qualname', - 'shortname', - 'canoname', - 'canonshortname', - 'annotations', - 'eval_type', - 'iter_mro_reversed', - 'guess_polymorphic_type', - 'cached_property', - 'label', - 'shortlabel', + "FieldMapping", + "DefaultsMapping", + "Unordered", + "KeywordReduce", + "InvalidAnnotation", + "abc_compatible_with_init_subclass", + "qualname", + "shortname", + "canoname", + "canonshortname", + "annotations", + "eval_type", + "iter_mro_reversed", + "guess_polymorphic_type", + "cached_property", + "label", + "shortlabel", ] # Workaround for https://bugs.python.org/issue29581 try: + @typing.no_type_check # type: ignore class _InitSubclassCheck(metaclass=abc.ABCMeta): ident: int - def __init_subclass__(self, - *args: Any, - ident: int = 808, - **kwargs: Any) -> None: + def __init_subclass__( + self, *args: Any, ident: int = 808, **kwargs: Any + ) -> None: self.ident = ident super().__init__(*args, **kwargs) @typing.no_type_check # type: ignore class _UsingKwargsInNew(_InitSubclassCheck, ident=909): ... + + except TypeError: abc_compatible_with_init_subclass = False else: abc_compatible_with_init_subclass = True -_T = TypeVar('_T') -RT = TypeVar('RT') +_T = TypeVar("_T") +RT = TypeVar("RT") #: Mapping of attribute name to attribute type. 
FieldMapping = Mapping[str, Type] @@ -153,7 +176,7 @@ def __le__(self, other: Any) -> bool: return True def __repr__(self) -> str: - return f'<{type(self).__name__}: {self.value!r}>' + return f"<{type(self).__name__}: {self.value!r}>" def _restore_from_keywords(typ: Type, kwargs: Dict) -> Any: @@ -206,64 +229,65 @@ def __reduce__(self) -> Tuple: def qualname(obj: Any) -> str: """Get object qualified name.""" - if not hasattr(obj, '__name__') and hasattr(obj, '__class__'): + if not hasattr(obj, "__name__") and hasattr(obj, "__class__"): obj = obj.__class__ - name = getattr(obj, '__qualname__', obj.__name__) - return '.'.join((obj.__module__, name)) + name = getattr(obj, "__qualname__", obj.__name__) + return ".".join((obj.__module__, name)) def shortname(obj: Any) -> str: """Get object name (non-qualified).""" - if not hasattr(obj, '__name__') and hasattr(obj, '__class__'): + if not hasattr(obj, "__name__") and hasattr(obj, "__class__"): obj = obj.__class__ - return '.'.join((obj.__module__, obj.__name__)) + return ".".join((obj.__module__, obj.__name__)) def canoname(obj: Any, *, main_name: str = None) -> str: """Get qualname of obj, trying to resolve the real name of ``__main__``.""" name = qualname(obj) - parts = name.split('.') - if parts[0] == '__main__': - return '.'.join([main_name or _detect_main_name()] + parts[1:]) + parts = name.split(".") + if parts[0] == "__main__": + return ".".join([main_name or _detect_main_name()] + parts[1:]) return name def canonshortname(obj: Any, *, main_name: str = None) -> str: """Get non-qualified name of obj, resolve real name of ``__main__``.""" name = shortname(obj) - parts = name.split('.') - if parts[0] == '__main__': - return '.'.join([main_name or _detect_main_name()] + parts[1:]) + parts = name.split(".") + if parts[0] == "__main__": + return ".".join([main_name or _detect_main_name()] + parts[1:]) return name def _detect_main_name() -> str: # pragma: no cover try: - filename = sys.modules['__main__'].__file__ + 
filename = sys.modules["__main__"].__file__ except (AttributeError, KeyError): # ipython/REPL - return '__main__' + return "__main__" else: path = Path(filename).absolute() node = path.parent seen = [] while node: - if (node / '__init__.py').exists(): + if (node / "__init__.py").exists(): seen.append(node.stem) node = node.parent else: break - return '.'.join(seen + [path.stem]) - - -def annotations(cls: Type, - *, - stop: Type = object, - invalid_types: Set = None, - alias_types: Mapping = None, - skip_classvar: bool = False, - globalns: Dict[str, Any] = None, - localns: Dict[str, Any] = None) -> Tuple[ - FieldMapping, DefaultsMapping]: + return ".".join(seen + [path.stem]) + + +def annotations( + cls: Type, + *, + stop: Type = object, + invalid_types: Set = None, + alias_types: Mapping = None, + skip_classvar: bool = False, + globalns: Dict[str, Any] = None, + localns: Dict[str, Any] = None, +) -> Tuple[FieldMapping, DefaultsMapping]: """Get class field definition in MRO order. Arguments: @@ -311,25 +335,28 @@ def annotations(cls: Type, for subcls in iter_mro_reversed(cls, stop=stop): defaults.update(subcls.__dict__) with suppress(AttributeError): - fields.update(local_annotations( - subcls, - invalid_types=invalid_types, - alias_types=alias_types, - skip_classvar=skip_classvar, - globalns=globalns, - localns=localns, - )) + fields.update( + local_annotations( + subcls, + invalid_types=invalid_types, + alias_types=alias_types, + skip_classvar=skip_classvar, + globalns=globalns, + localns=localns, + ) + ) return fields, defaults def local_annotations( - cls: Type, - *, - invalid_types: Set = None, - alias_types: Mapping = None, - skip_classvar: bool = False, - globalns: Dict[str, Any] = None, - localns: Dict[str, Any] = None) -> Iterable[Tuple[str, Type]]: + cls: Type, + *, + invalid_types: Set = None, + alias_types: Mapping = None, + skip_classvar: bool = False, + globalns: Dict[str, Any] = None, + localns: Dict[str, Any] = None, +) -> Iterable[Tuple[str, Type]]: 
return _resolve_refs( cls.__annotations__, globalns if globalns is not None else _get_globalns(cls), @@ -340,12 +367,14 @@ def local_annotations( ) -def _resolve_refs(d: Dict[str, Any], - globalns: Dict[str, Any] = None, - localns: Dict[str, Any] = None, - invalid_types: Set = None, - alias_types: Mapping = None, - skip_classvar: bool = False) -> Iterable[Tuple[str, Type]]: +def _resolve_refs( + d: Dict[str, Any], + globalns: Dict[str, Any] = None, + localns: Dict[str, Any] = None, + invalid_types: Set = None, + alias_types: Mapping = None, + skip_classvar: bool = False, +) -> Iterable[Tuple[str, Type]]: invalid_types = invalid_types or set() alias_types = alias_types or {} for k, v in d.items(): @@ -356,11 +385,13 @@ def _resolve_refs(d: Dict[str, Any], yield k, v -def eval_type(typ: Any, - globalns: Dict[str, Any] = None, - localns: Dict[str, Any] = None, - invalid_types: Set = None, - alias_types: Mapping = None) -> Type: +def eval_type( + typ: Any, + globalns: Dict[str, Any] = None, + localns: Dict[str, Any] = None, + invalid_types: Set = None, + alias_types: Mapping = None, +) -> Type: """Convert (possible) string annotation to actual type. 
Examples: @@ -379,9 +410,9 @@ def eval_type(typ: Any, return alias_types.get(typ, typ) -def _ForwardRef_safe_eval(ref: ForwardRef, - globalns: Dict[str, Any] = None, - localns: Dict[str, Any] = None) -> Type: +def _ForwardRef_safe_eval( + ref: ForwardRef, globalns: Dict[str, Any] = None, localns: Dict[str, Any] = None +) -> Type: # On 3.6/3.7 ForwardRef._evaluate crashes if str references ClassVar if not ref.__forward_evaluated__: if globalns is None and localns is None: @@ -390,10 +421,9 @@ def _ForwardRef_safe_eval(ref: ForwardRef, globalns = localns elif localns is None: localns = globalns - val = eval(ref.__forward_code__, globalns, localns) + val = eval(ref.__forward_code__, globalns, localns) # noqa: S307 if not _is_class_var(val): - val = _type_check(val, - 'Forward references must evaluate to types.') + val = _type_check(val, "Forward references must evaluate to types.") ref.__forward_value__ = val ref.__forward_evaluated__ = True return ref.__forward_value__ @@ -446,93 +476,68 @@ def remove_optional(typ: Type) -> Type: def is_union(typ: Type) -> bool: name = typ.__class__.__name__ - return ( - (name == '_GenericAlias' and typ.__origin__ is typing.Union) or # 3.7 - name == '_Union' # 3.6 + return any( + [ + name == "_UnionGenericAlias", # 3.9 + name == "_GenericAlias" and typ.__origin__ is typing.Union, # 3.7 + name == "_Union", # 3.6 + ], ) def is_optional(typ: Type) -> bool: - args = getattr(typ, '__args__', ()) - if typ.__class__.__name__ == '_GenericAlias': - # Py3.7 - if typ.__origin__ is typing.Union: - for arg in args: - if arg is type(None): # noqa - return True - elif typ.__class__.__name__ == '_Union': # pragma: no cover - # Py3.6 - # Optional[x] actually returns Union[x, type(None)] - if args and type(None) in args: # noqa - return True + if is_union(typ): + args = getattr(typ, "__args__", ()) + return any([True for arg in args if arg is None or arg is type(None)]) # noqa return False -def _remove_optional(typ: Type, *, - find_origin: bool = 
False) -> Tuple[List[Any], Type]: - args = getattr(typ, '__args__', ()) - if typ.__class__.__name__ == '_GenericAlias': - # 3.7 - if typ.__origin__ is typing.Union: - # Optional[List[int]] -> Union[List[int], NoneType] - found_None = False - union_type_args: Optional[List] = None - union_type: Optional[Type] = None - for arg in args: - if arg is None or arg is type(None): # noqa - found_None = True - else: - # returns ((int,), list) - union_type_args = getattr(arg, '__args__', ()) - union_type = arg - if find_origin: - union_type = getattr(arg, '__origin__', arg) - if union_type is not None and found_None: - return cast(List, union_type_args), union_type - else: - if find_origin: - # List[int] -> ((int,), list) - typ = typ.__origin__ # for List this is list, etc. - return args, typ - elif typ.__class__.__name__ == '_Union': # pragma: no cover - # Py3.6 - # Optional[List[int]] gives Union[List[int], type(None)] +def _remove_optional(typ: Type, *, find_origin: bool = False) -> Tuple[List[Any], Type]: + args = getattr(typ, "__args__", ()) + if is_union(typ): + # 3.7+: Optional[List[int]] -> Union[List[int], NoneType] + # 3.6: Optional[List[int]] -> Union[List[int], type(None)] # returns: ((int,), list) found_None = False - union_type_args = None - union_type = None + union_type_args: Optional[List] = None + union_type: Optional[Type] = None for arg in args: if arg is None or arg is type(None): # noqa found_None = True else: - union_type_args = getattr(arg, '__args__', ()) + union_type_args = getattr(arg, "__args__", ()) union_type = arg if find_origin: - if union_type is not None: + if union_type is not None and sys.version_info.minor == 6: union_type = _py36_maybe_unwrap_GenericMeta(union_type) + else: + union_type = getattr(arg, "__origin__", arg) if union_type is not None and found_None: return cast(List, union_type_args), union_type if find_origin: - typ = _py36_maybe_unwrap_GenericMeta(typ) + if hasattr(typ, "__origin__"): + # List[int] -> ((int,), list) + typ = 
_py36_maybe_unwrap_GenericMeta(typ) return args, typ def _py36_maybe_unwrap_GenericMeta(typ: Type) -> Type: - if typ.__class__.__name__ == 'GenericMeta': # Py3.6 + if typ.__class__.__name__ == "GenericMeta": # Py3.6 orig_bases = typ.__orig_bases__ if orig_bases and orig_bases[0] in (list, tuple, dict, set): return cast(Type, orig_bases[0]) - return cast(Type, getattr(typ, '__origin__', typ)) + return cast(Type, getattr(typ, "__origin__", typ)) def guess_polymorphic_type( - typ: Type, - *, - set_types: Tuple[Type, ...] = SET_TYPES, - list_types: Tuple[Type, ...] = LIST_TYPES, - tuple_types: Tuple[Type, ...] = TUPLE_TYPES, - dict_types: Tuple[Type, ...] = DICT_TYPES) -> Tuple[Type, Type]: + typ: Type, + *, + set_types: Tuple[Type, ...] = SET_TYPES, + list_types: Tuple[Type, ...] = LIST_TYPES, + tuple_types: Tuple[Type, ...] = TUPLE_TYPES, + dict_types: Tuple[Type, ...] = DICT_TYPES, +) -> Tuple[Type, Type]: """Try to find the polymorphic and concrete type of an abstract type. Returns tuple of ``(polymorphic_type, concrete_type)``. @@ -561,7 +566,7 @@ def guess_polymorphic_type( elif issubclass(typ, dict_types): # Dict[_, x] return dict, args[1] if args and len(args) > 1 else Any - raise TypeError(f'Not a generic type: {typ!r}') + raise TypeError(f"Not a generic type: {typ!r}") guess_concrete_type = guess_polymorphic_type # XXX compat @@ -573,28 +578,32 @@ def _unary_type_arg(args: List[Type]) -> Any: def label(s: Any) -> str: """Return the name of an object as string.""" - return _label('label', s) + return _label("label", s) def shortlabel(s: Any) -> str: """Return the shortened name of an object as string.""" - return _label('shortlabel', s) + return _label("shortlabel", s) -def _label(label_attr: str, s: Any, - pass_types: Tuple[Type, ...] = (str,), - str_types: Tuple[Type, ...] = (str, int, float, Decimal)) -> str: +def _label( + label_attr: str, + s: Any, + pass_types: Tuple[Type, ...] = (str,), + str_types: Tuple[Type, ...] 
= (str, int, float, Decimal), +) -> str: if isinstance(s, pass_types): return cast(str, s) elif isinstance(s, str_types): return str(s) return str( - getattr(s, label_attr, None) or - getattr(s, 'name', None) or - getattr(s, '__qualname__', None) or - getattr(s, '__name__', None) or - getattr(type(s), '__qualname__', None) or - type(s).__name__) + getattr(s, label_attr, None) + or getattr(s, "name", None) + or getattr(s, "__qualname__", None) + or getattr(s, "__name__", None) + or getattr(type(s), "__qualname__", None) + or type(s).__name__ + ) class cached_property(Generic[RT]): @@ -623,12 +632,14 @@ def connection(self, value): print(f'Connection {value!r} deleted') """ - def __init__(self, - fget: Callable[[Any], RT], - fset: Callable[[Any, RT], RT] = None, - fdel: Callable[[Any, RT], None] = None, - doc: str = None, - class_attribute: str = None) -> None: + def __init__( + self, + fget: Callable[[Any], RT], + fset: Callable[[Any, RT], RT] = None, + fdel: Callable[[Any, RT], None] = None, + doc: str = None, + class_attribute: str = None, + ) -> None: self.__get: Callable[[Any], RT] = fget self.__set: Optional[Callable[[Any, RT], RT]] = fset self.__del: Optional[Callable[[Any, RT], None]] = fdel @@ -640,9 +651,7 @@ def __init__(self, def is_set(self, obj: Any) -> bool: return self.__name__ in obj.__dict__ - def __get__(self, - obj: Any, - type: Type = None) -> RT: + def __get__(self, obj: Any, type: Type = None) -> RT: if obj is None: if type is not None and self.class_attribute: return cast(RT, getattr(type, self.class_attribute)) @@ -663,8 +672,8 @@ def __delete__(self, obj: Any, _sentinel: Any = object()) -> None: if self.__del is not None and value is not _sentinel: self.__del(obj, value) - def setter(self, fset: Callable[[Any, RT], RT]) -> 'cached_property': + def setter(self, fset: Callable[[Any, RT], RT]) -> "cached_property": return self.__class__(self.__get, fset, self.__del) - def deleter(self, fdel: Callable[[Any, RT], None]) -> 'cached_property': + 
def deleter(self, fdel: Callable[[Any, RT], None]) -> "cached_property": return self.__class__(self.__get, self.__set, fdel) diff --git a/mode/utils/queues.py b/mode/utils/queues.py index b5ab377c..c78f19e9 100644 --- a/mode/utils/queues.py +++ b/mode/utils/queues.py @@ -5,11 +5,12 @@ from collections import deque from typing import Any, Callable, List, Set, TypeVar, cast, no_type_check from weakref import WeakSet + from .locks import Event from .objects import cached_property from .typing import Deque -_T = TypeVar('_T') +_T = TypeVar("_T") class FlowControlEvent: @@ -49,12 +50,15 @@ class FlowControlEvent: """ if typing.TYPE_CHECKING: - _queues: WeakSet['FlowControlQueue'] + _queues: WeakSet["FlowControlQueue"] _queues = None - def __init__(self, *, - initially_suspended: bool = True, - loop: asyncio.AbstractEventLoop = None) -> None: + def __init__( + self, + *, + initially_suspended: bool = True, + loop: asyncio.AbstractEventLoop = None + ) -> None: self.loop = loop self._resume = Event(loop=self.loop) self._suspend = Event(loop=self.loop) @@ -62,7 +66,7 @@ def __init__(self, *, self._suspend.set() self._queues = WeakSet() - def manage_queue(self, queue: 'FlowControlQueue') -> None: + def manage_queue(self, queue: "FlowControlQueue") -> None: """Add :class:`FlowControlQueue` to be cleared on resume.""" self._queues.add(queue) @@ -97,16 +101,19 @@ class FlowControlQueue(asyncio.Queue): :class:`FlowControlEvent`. 
""" - pressure_high_ratio = 1.25 # divided by - pressure_drop_ratio = 0.40 # multiplied by + pressure_high_ratio = 1.25 # divided by + pressure_drop_ratio = 0.40 # multiplied by _pending_pressure_drop_callbacks: Set[Callable] - def __init__(self, maxsize: int = 0, - *, - flow_control: FlowControlEvent, - clear_on_resume: bool = False, - **kwargs: Any) -> None: + def __init__( + self, + maxsize: int = 0, + *, + flow_control: FlowControlEvent, + clear_on_resume: bool = False, + **kwargs: Any + ) -> None: self._flow_control: FlowControlEvent = flow_control self._clear_on_resume: bool = clear_on_resume if self._clear_on_resume: @@ -117,9 +124,9 @@ def __init__(self, maxsize: int = 0, def clear(self) -> None: self._queue.clear() # type: ignore - def put_nowait_enhanced(self, value: _T, *, - on_pressure_high: Callable, - on_pressure_drop: Callable) -> bool: + def put_nowait_enhanced( + self, value: _T, *, on_pressure_high: Callable, on_pressure_drop: Callable + ) -> bool: in_pressure_high_state = self.in_pressure_high_state(on_pressure_drop) if in_pressure_high_state: on_pressure_high() diff --git a/mode/utils/text.py b/mode/utils/text.py index ad632681..da6bd2b2 100644 --- a/mode/utils/text.py +++ b/mode/utils/text.py @@ -1,21 +1,22 @@ """Text and string manipulation utilities.""" from difflib import SequenceMatcher from typing import AnyStr, Iterable, Iterator, NamedTuple, Optional + from .compat import want_str __all__ = [ - 'FuzzyMatch', - 'title', - 'didyoumean', - 'fuzzymatch_choices', - 'fuzzymatch_iter', - 'fuzzymatch_best', - 'abbr', - 'abbr_fqdn', - 'enumeration', - 'shorten_fqdn', - 'pluralize', - 'maybecat', + "FuzzyMatch", + "title", + "didyoumean", + "fuzzymatch_choices", + "fuzzymatch_iter", + "fuzzymatch_best", + "abbr", + "abbr_fqdn", + "enumeration", + "shorten_fqdn", + "pluralize", + "maybecat", ] @@ -33,18 +34,20 @@ def title(s: str) -> str: ``"foo-bar" -> "Foo Bar"`` """ - return ' '.join( - p.capitalize() - for p in s.replace('-', ' ') - 
.replace('_', ' ').split()) - - -def didyoumean(haystack: Iterable[str], needle: str, - *, - fmt_many: str = 'Did you mean one of {alt}?', - fmt_one: str = 'Did you mean {alt}?', - fmt_none: str = '', - min_ratio: float = 0.6) -> str: + return " ".join( + p.capitalize() for p in s.replace("-", " ").replace("_", " ").split() + ) + + +def didyoumean( + haystack: Iterable[str], + needle: str, + *, + fmt_many: str = "Did you mean one of {alt}?", + fmt_one: str = "Did you mean {alt}?", + fmt_none: str = "", + min_ratio: float = 0.6, +) -> str: """Generate message with helpful list of alternatives. Examples: @@ -74,7 +77,8 @@ def didyoumean(haystack: Iterable[str], needle: str, Default is 0.6. """ return fuzzymatch_choices( - list(haystack), needle, + list(haystack), + needle, fmt_many=fmt_many, fmt_one=fmt_one, fmt_none=fmt_none, @@ -82,10 +86,13 @@ def didyoumean(haystack: Iterable[str], needle: str, ) -def enumeration(items: Iterable[str], *, - start: int = 1, - sep: str = '\n', - template: str = '{index}) {item}') -> str: +def enumeration( + items: Iterable[str], + *, + start: int = 1, + sep: str = "\n", + template: str = "{index}) {item}", +) -> str: r"""Enumerate list of strings. 
Example: @@ -98,31 +105,34 @@ def enumeration(items: Iterable[str], *, ) -def fuzzymatch_choices(haystack: Iterable[str], needle: str, - *, - fmt_many: str = 'one of {alt}', - fmt_one: str = '{alt}', - fmt_none: str = '', - min_ratio: float = 0.6) -> str: +def fuzzymatch_choices( + haystack: Iterable[str], + needle: str, + *, + fmt_many: str = "one of {alt}", + fmt_one: str = "{alt}", + fmt_none: str = "", + min_ratio: float = 0.6, +) -> str: """Fuzzy match reducing to error message suggesting an alternative.""" alt = list(fuzzymatch(haystack, needle, min_ratio=min_ratio)) if not alt: return fmt_none return (fmt_many if len(alt) > 1 else fmt_one).format( - alt=', '.join(alt), + alt=", ".join(alt), ) -def fuzzymatch(haystack: Iterable[str], needle: str, - *, - min_ratio: float = 0.6) -> Iterator[str]: +def fuzzymatch( + haystack: Iterable[str], needle: str, *, min_ratio: float = 0.6 +) -> Iterator[str]: for match in fuzzymatch_iter(haystack, needle, min_ratio=min_ratio): yield match.value -def fuzzymatch_iter(haystack: Iterable[str], needle: str, - *, - min_ratio: float = 0.6) -> Iterator[FuzzyMatch]: +def fuzzymatch_iter( + haystack: Iterable[str], needle: str, *, min_ratio: float = 0.6 +) -> Iterator[FuzzyMatch]: """Fuzzy Match: Including actual ratio. 
Yields: @@ -134,24 +144,20 @@ def fuzzymatch_iter(haystack: Iterable[str], needle: str, yield FuzzyMatch(ratio, key) -def fuzzymatch_best(haystack: Iterable[str], needle: str, - *, - min_ratio: float = 0.6) -> Optional[str]: +def fuzzymatch_best( + haystack: Iterable[str], needle: str, *, min_ratio: float = 0.6 +) -> Optional[str]: """Fuzzy Match - Return best match only (single scalar value).""" try: return sorted( - fuzzymatch_iter( - haystack, - needle, - min_ratio=min_ratio), + fuzzymatch_iter(haystack, needle, min_ratio=min_ratio), reverse=True, )[0].value except IndexError: return None -def abbr(s: str, max: int, suffix: str = '...', - words: bool = False) -> str: +def abbr(s: str, max: int, suffix: str = "...", words: bool = False) -> str: """Abbreviate word.""" if words: return _abbr_word_boundary(s, max, suffix) @@ -162,18 +168,18 @@ def _abbr_word_boundary(s: str, max: int, suffix: str) -> str: # Do not cut-off any words, but means the limit is even harder # and we won't include any partial words. if len(s) > max: - return suffix and (s[:max - len(suffix)] + suffix) or s[:max] + return suffix and (s[: max - len(suffix)] + suffix) or s[:max] return s -def _abbr_abrupt(s: str, max: int, suffix: str = '...') -> str: +def _abbr_abrupt(s: str, max: int, suffix: str = "...") -> str: # hard limit (can cut off in the middle of a word). if max and len(s) >= max: - return s[:max].rsplit(' ', 1)[0] + suffix + return s[:max].rsplit(" ", 1)[0] + suffix return s -def abbr_fqdn(origin: str, name: str, *, prefix: str = '') -> str: +def abbr_fqdn(origin: str, name: str, *, prefix: str = "") -> str: """Abbreviate fully-qualified Python name, by removing origin. ``app.origin`` is the package where the app is defined, @@ -191,31 +197,31 @@ def abbr_fqdn(origin: str, name: str, *, prefix: str = '') -> str: abbr_fqdn will only remove the origin portion of the name. 
""" if name.startswith(origin): - name = name[len(origin) + 1:] - return f'{prefix}{name}' + name = name[len(origin) + 1 :] + return f"{prefix}{name}" return name def shorten_fqdn(s: str, max: int = 32) -> str: """Shorten fully-qualified Python name (like "os.path.isdir").""" if len(s) > max: - module, sep, cls = s.rpartition('.') + module, sep, cls = s.rpartition(".") if sep: - module = abbr(module, max - len(cls) - 3, '', words=True) - return module + '[.]' + cls + module = abbr(module, max - len(cls) - 3, "", words=True) + return module + "[.]" + cls return s -def pluralize(n: int, text: str, suffix: str = 's') -> str: +def pluralize(n: int, text: str, suffix: str = "s") -> str: """Pluralize term when n is greater than one.""" if n != 1: return text + suffix return text -def maybecat(s: Optional[AnyStr], suffix: str = '', - *, - prefix: str = '') -> Optional[str]: +def maybecat( + s: Optional[AnyStr], suffix: str = "", *, prefix: str = "" +) -> Optional[str]: """Concatenate string only if existing string s' is defined. 
Keyword Arguments: diff --git a/mode/utils/times.py b/mode/utils/times.py index 65672889..87e113bb 100644 --- a/mode/utils/times.py +++ b/mode/utils/times.py @@ -12,18 +12,18 @@ from .typing import AsyncContextManager __all__ = [ - 'Bucket', - 'Seconds', - 'TokenBucket', - 'rate', - 'rate_limit', - 'want_seconds', - 'humanize_seconds', - 'humanize_seconds_ago', + "Bucket", + "Seconds", + "TokenBucket", + "rate", + "rate_limit", + "want_seconds", + "humanize_seconds", + "humanize_seconds_ago", ] TIME_MONOTONIC: Callable[[], float] -if sys.platform == 'win32': +if sys.platform == "win32": TIME_MONOTONIC = time.time else: TIME_MONOTONIC = time.monotonic @@ -39,19 +39,19 @@ class Unit(NamedTuple): TIME_UNITS: List[Unit] = [ - Unit('day', 60 * 60 * 24.0, lambda n: format(n, '.2f')), - Unit('hour', 60 * 60.0, lambda n: format(n, '.2f')), - Unit('minute', 60.0, lambda n: format(n, '.2f')), - Unit('second', 1.0, lambda n: format(n, '.2f')), + Unit("day", 60 * 60 * 24.0, lambda n: format(n, ".2f")), + Unit("hour", 60 * 60.0, lambda n: format(n, ".2f")), + Unit("minute", 60.0, lambda n: format(n, ".2f")), + Unit("second", 1.0, lambda n: format(n, ".2f")), ] #: What the characters in a "rate" string means. #: E.g. 
8/s is "eight in one second" RATE_MODIFIER_MAP: Mapping[str, Callable[[float], float]] = { - 's': lambda n: n, - 'm': lambda n: n / 60.0, - 'h': lambda n: n / 60.0 / 60.0, - 'd': lambda n: n / 60.0 / 60.0 / 24, + "s": lambda n: n, + "m": lambda n: n / 60.0, + "h": lambda n: n / 60.0 / 60.0, + "d": lambda n: n / 60.0 / 60.0 / 24, } @@ -104,12 +104,16 @@ class MyError(Exception): _tokens: float - def __init__(self, rate: Seconds, over: Seconds = 1.0, - *, - fill_rate: Seconds = None, - capacity: Seconds = None, - raises: Type[BaseException] = None, - loop: asyncio.AbstractEventLoop = None) -> None: + def __init__( + self, + rate: Seconds, + over: Seconds = 1.0, + *, + fill_rate: Seconds = None, + capacity: Seconds = None, + raises: Type[BaseException] = None, + loop: asyncio.AbstractEventLoop = None + ) -> None: self.rate = want_seconds(rate) self.capacity = want_seconds(over) self.raises = raises @@ -139,18 +143,20 @@ def fill_rate(self) -> float: #: faster/slower, then just override this. 
return self.rate - async def __aenter__(self) -> 'Bucket': + async def __aenter__(self) -> "Bucket": if not self.pour(): if self.raises: raise self.raises() expected_time = self.expected_time() - await asyncio.sleep(expected_time, loop=self.loop) + await asyncio.sleep(expected_time) return self - async def __aexit__(self, - exc_type: Type[BaseException] = None, - exc_val: BaseException = None, - exc_tb: TracebackType = None) -> Optional[bool]: + async def __aexit__( + self, + exc_type: Type[BaseException] = None, + exc_val: BaseException = None, + exc_tb: TracebackType = None, + ) -> Optional[bool]: return None @@ -197,8 +203,8 @@ def rate(r: float) -> float: @rate.register(str) def _rate_str(r: str) -> float: # noqa: F811 - ops, _, modifier = r.partition('/') - return RATE_MODIFIER_MAP[modifier or 's'](float(ops)) or 0 + ops, _, modifier = r.partition("/") + return RATE_MODIFIER_MAP[modifier or "s"](float(ops)) or 0 @rate.register(int) # noqa: F811 @@ -211,11 +217,14 @@ def _rate_None(r: None) -> float: return 0.0 -def rate_limit(rate: float, over: Seconds = 1.0, - *, - bucket_type: Type[Bucket] = TokenBucket, - raises: Type[BaseException] = None, - loop: asyncio.AbstractEventLoop = None) -> Bucket: +def rate_limit( + rate: float, + over: Seconds = 1.0, + *, + bucket_type: Type[Bucket] = TokenBucket, + raises: Type[BaseException] = None, + loop: asyncio.AbstractEventLoop = None +) -> Bucket: """Create rate limiting manager.""" return bucket_type(rate, over, raises=raises, loop=loop) @@ -236,12 +245,15 @@ def _want_seconds_timedelta(s: timedelta) -> float: return s.total_seconds() -def humanize_seconds(secs: float, *, - prefix: str = '', - suffix: str = '', - sep: str = '', - now: str = 'now', - microseconds: bool = False) -> str: +def humanize_seconds( + secs: float, + *, + prefix: str = "", + suffix: str = "", + sep: str = "", + now: str = "now", + microseconds: bool = False +) -> str: """Show seconds in human form. 
For example, 60 becomes "1 minute", and 7200 becomes "2 hours". @@ -255,25 +267,29 @@ def humanize_seconds(secs: float, *, now (str): Literal 'now'. microseconds (bool): Include microseconds. """ - secs = float(format(float(secs), '.2f')) + secs = float(format(float(secs), ".2f")) for unit, divider, formatter in TIME_UNITS: if secs >= divider: w = secs / float(divider) - return '{0}{1}{2} {3}{4}'.format( - prefix, sep, formatter(w), - pluralize(int(w), unit), suffix) + return "{0}{1}{2} {3}{4}".format( + prefix, sep, formatter(w), pluralize(int(w), unit), suffix + ) if microseconds and secs > 0.0: - return '{prefix}{sep}{0:.2f} seconds{suffix}'.format( - secs, sep=sep, prefix=prefix, suffix=suffix) + return "{prefix}{sep}{0:.2f} seconds{suffix}".format( + secs, sep=sep, prefix=prefix, suffix=suffix + ) return now -def humanize_seconds_ago(secs: float, *, - prefix: str = '', - suffix: str = ' ago', - sep: str = '', - now: str = 'just now', - microseconds: bool = False) -> str: +def humanize_seconds_ago( + secs: float, + *, + prefix: str = "", + suffix: str = " ago", + sep: str = "", + now: str = "just now", + microseconds: bool = False +) -> str: """Show seconds in "3.33 seconds ago" form. If seconds are less than one, returns "just now". 
diff --git a/mode/utils/tracebacks.py b/mode/utils/tracebacks.py index 6bc5315c..96348405 100644 --- a/mode/utils/tracebacks.py +++ b/mode/utils/tracebacks.py @@ -6,11 +6,11 @@ from traceback import StackSummary, print_list, walk_tb from types import FrameType, TracebackType from typing import ( + IO, Any, AsyncGenerator, Coroutine, Generator, - IO, Mapping, Optional, Union, @@ -18,20 +18,23 @@ ) __all__ = [ - 'Traceback', - 'format_task_stack', - 'print_task_stack', + "Traceback", + "format_task_stack", + "print_task_stack", ] DEFAULT_MAX_FRAMES = sys.getrecursionlimit() // 8 -def print_task_stack(task: asyncio.Task, *, - file: IO = sys.stderr, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> None: +def print_task_stack( + task: asyncio.Task, + *, + file: IO = sys.stderr, + limit: int = DEFAULT_MAX_FRAMES, + capture_locals: bool = False, +) -> None: """Print the stack trace for an :class:`asyncio.Task`.""" - print(f'Stack for {task!r} (most recent call last):', file=file) + print(f"Stack for {task!r} (most recent call last):", file=file) tb = Traceback.from_task(task, limit=limit) print_list( StackSummary.extract( @@ -43,12 +46,15 @@ def print_task_stack(task: asyncio.Task, *, ) -def print_coro_stack(coro: Coroutine, *, - file: IO = sys.stderr, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> None: +def print_coro_stack( + coro: Coroutine, + *, + file: IO = sys.stderr, + limit: int = DEFAULT_MAX_FRAMES, + capture_locals: bool = False, +) -> None: """Print the stack trace for a currently running coroutine.""" - print(f'Stack for {coro!r} (most recent call last):', file=file) + print(f"Stack for {coro!r} (most recent call last):", file=file) tb = Traceback.from_coroutine(coro, limit=limit) print_list( StackSummary.extract( @@ -60,12 +66,15 @@ def print_coro_stack(coro: Coroutine, *, ) -def print_agen_stack(agen: AsyncGenerator, *, - file: IO = sys.stderr, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> 
None: +def print_agen_stack( + agen: AsyncGenerator, + *, + file: IO = sys.stderr, + limit: int = DEFAULT_MAX_FRAMES, + capture_locals: bool = False, +) -> None: """Print the stack trace for a currently running async generator.""" - print(f'Stack for {agen!r} (most recent call last):', file=file) + print(f"Stack for {agen!r} (most recent call last):", file=file) tb = Traceback.from_agen(agen, limit=limit) print_list( StackSummary.extract( @@ -77,27 +86,30 @@ def print_agen_stack(agen: AsyncGenerator, *, ) -def format_task_stack(task: asyncio.Task, *, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> str: +def format_task_stack( + task: asyncio.Task, *, limit: int = DEFAULT_MAX_FRAMES, capture_locals: bool = False +) -> str: """Format :class:`asyncio.Task` stack trace as a string.""" f = io.StringIO() print_task_stack(task, file=f, limit=limit, capture_locals=capture_locals) return f.getvalue() -def format_coro_stack(coro: Coroutine, *, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> str: +def format_coro_stack( + coro: Coroutine, *, limit: int = DEFAULT_MAX_FRAMES, capture_locals: bool = False +) -> str: """Format coroutine stack trace as a string.""" f = io.StringIO() print_coro_stack(coro, file=f, limit=limit, capture_locals=capture_locals) return f.getvalue() -def format_agen_stack(agen: AsyncGenerator, *, - limit: int = DEFAULT_MAX_FRAMES, - capture_locals: bool = False) -> str: +def format_agen_stack( + agen: AsyncGenerator, + *, + limit: int = DEFAULT_MAX_FRAMES, + capture_locals: bool = False, +) -> str: f = io.StringIO() print_agen_stack(agen, file=f, limit=limit, capture_locals=capture_locals) return f.getvalue() @@ -118,10 +130,9 @@ class _CustomFrame: f_code: _CustomCode f_locals: Mapping[str, Any] - def __init__(self, - globals: Mapping[str, Any], - fileno: int, - code: _CustomCode) -> None: + def __init__( + self, globals: Mapping[str, Any], fileno: int, code: _CustomCode + ) -> None: self.f_globals = globals 
self.f_fileno = fileno self.f_code = code @@ -132,27 +143,29 @@ class _BaseTraceback: tb_frame: FrameType tb_lineno: int tb_lasti: int - tb_next: Optional['_BaseTraceback'] + tb_next: Optional["_BaseTraceback"] class _Truncated(_BaseTraceback): - - def __init__(self, - filename: str = '...', - name: str = '[rest of traceback truncated]') -> None: + def __init__( + self, filename: str = "...", name: str = "[rest of traceback truncated]" + ) -> None: self.tb_lineno = -1 - self.tb_frame = cast(FrameType, _CustomFrame( - globals={ - '__file__': '', - '__name__': '', - '__loader__': None, - }, - fileno=-1, - code=_CustomCode( - filename=filename, - name=name, + self.tb_frame = cast( + FrameType, + _CustomFrame( + globals={ + "__file__": "", + "__name__": "", + "__loader__": None, + }, + fileno=-1, + code=_CustomCode( + filename=filename, + name=name, + ), ), - )) + ) self.tb_next = None self.tb_lasti = -1 @@ -160,39 +173,41 @@ def __init__(self, class Traceback(_BaseTraceback): """Traceback object with truncated frames.""" - def __init__(self, - frame: FrameType, - lineno: int = None, - lasti: int = None) -> None: + def __init__(self, frame: FrameType, lineno: int = None, lasti: int = None) -> None: self.tb_frame = frame self.tb_lineno = lineno if lineno is not None else frame.f_lineno self.tb_lasti = lasti if lasti is not None else frame.f_lasti self.tb_next = None @classmethod - def from_task(cls, task: asyncio.Task, *, - limit: int = DEFAULT_MAX_FRAMES) -> _BaseTraceback: + def from_task( + cls, task: asyncio.Task, *, limit: int = DEFAULT_MAX_FRAMES + ) -> _BaseTraceback: coro = task._coro # type: ignore return cls.from_coroutine(coro, limit=limit) @classmethod - def from_agen(cls, agen: AsyncGenerator, *, - limit: int = DEFAULT_MAX_FRAMES) -> _BaseTraceback: + def from_agen( + cls, agen: AsyncGenerator, *, limit: int = DEFAULT_MAX_FRAMES + ) -> _BaseTraceback: return cls.from_coroutine(agen, limit=limit) @classmethod def from_coroutine( - cls, coro: 
Union[AsyncGenerator, Coroutine, Generator], *, - depth: int = 0, - limit: Optional[int] = DEFAULT_MAX_FRAMES) -> _BaseTraceback: + cls, + coro: Union[AsyncGenerator, Coroutine, Generator], + *, + depth: int = 0, + limit: Optional[int] = DEFAULT_MAX_FRAMES, + ) -> _BaseTraceback: try: frame = cls._detect_frame(coro) except AttributeError: - if type(coro).__name__ == 'async_generator_asend': - return _Truncated(filename='async_generator_asend') + if type(coro).__name__ == "async_generator_asend": + return _Truncated(filename="async_generator_asend") raise if limit is None: - limit = getattr(sys, 'tracebacklimit', None) + limit = getattr(sys, "tracebacklimit", None) if limit is not None and limit < 0: limit = 0 frames = [] @@ -221,14 +236,13 @@ def from_coroutine( if limit is not None and depth > limit: next_node = _Truncated() else: - next_node = cls.from_coroutine( - cr_await, limit=limit, depth=depth + 1) + next_node = cls.from_coroutine(cr_await, limit=limit, depth=depth + 1) if root is not None: root.tb_next = next_node else: return next_node if root is None: - raise RuntimeError('cannot find stack of coroutine') + raise RuntimeError("cannot find stack of coroutine") return root @classmethod @@ -238,8 +252,7 @@ def _detect_frame(cls, obj: Any) -> FrameType: return cls._get_coroutine_frame(obj) @classmethod - def _get_coroutine_frame(cls, - coro: Union[Coroutine, Generator]) -> FrameType: + def _get_coroutine_frame(cls, coro: Union[Coroutine, Generator]) -> FrameType: try: if inspect.isgenerator(coro): # is a @asyncio.coroutine wrapped generator @@ -253,8 +266,10 @@ def _get_coroutine_frame(cls, @classmethod def _what_is_this(cls, obj: Any) -> AttributeError: return AttributeError( - 'WHAT IS THIS? str={0} repr={1!r} typ={2!r} dir={3}'.format( - obj, obj, type(obj), dir(obj))) + "WHAT IS THIS? 
str={0} repr={1!r} typ={2!r} dir={3}".format( + obj, obj, type(obj), dir(obj) + ) + ) @classmethod def _get_agen_frame(cls, agen: AsyncGenerator) -> FrameType: @@ -264,8 +279,7 @@ def _get_agen_frame(cls, agen: AsyncGenerator) -> FrameType: raise cls._what_is_this(agen) from exc @staticmethod - def _get_coroutine_next( - coro: Union[AsyncGenerator, Coroutine, Generator]) -> Any: + def _get_coroutine_next(coro: Union[AsyncGenerator, Coroutine, Generator]) -> Any: if inspect.isasyncgen(coro): # is a async def async-generator return cast(AsyncGenerator, coro).ag_await diff --git a/mode/utils/trees.py b/mode/utils/trees.py index 0732654c..b3d58efc 100644 --- a/mode/utils/trees.py +++ b/mode/utils/trees.py @@ -9,10 +9,10 @@ from .typing import Deque __all__ = [ - 'Node', + "Node", ] -T = TypeVar('T') +T = TypeVar("T") class Node(NodeT[T]): @@ -44,11 +44,14 @@ class Node(NodeT[T]): def _new_node(cls, data: T, **kwargs: Any) -> NodeT[T]: return cls(data, **kwargs) - def __init__(self, data: T, - *, - root: NodeT = None, - parent: NodeT = None, - children: List[NodeT[T]] = None) -> None: + def __init__( + self, + data: T, + *, + root: NodeT = None, + parent: NodeT = None, + children: List[NodeT[T]] = None, + ) -> None: self.data = data if root is not None: self.root = root @@ -133,7 +136,7 @@ def as_graph(self) -> DependencyGraphT: return graph def __repr__(self) -> str: - return f'{type(self).__name__}: {self.path}' + return f"{type(self).__name__}: {self.path}" @property def depth(self) -> int: @@ -144,9 +147,7 @@ def _find_depth(self) -> int: @property def path(self) -> str: - return '/'.join(reversed([ - shortlabel(node.data) for node in self.walk() - ])) + return "/".join(reversed([shortlabel(node.data) for node in self.walk()])) @property def parent(self) -> Optional[NodeT]: @@ -155,7 +156,7 @@ def parent(self) -> Optional[NodeT]: @parent.setter def parent(self, node: NodeT) -> None: if node is self: - raise ValueError('Parent node cannot be itself.') + raise 
ValueError("Parent node cannot be itself.") self._parent = node @property @@ -165,5 +166,5 @@ def root(self) -> Optional[NodeT]: @root.setter def root(self, node: NodeT) -> None: if node is self: - raise ValueError('Root node cannot be itself.') + raise ValueError("Root node cannot be itself.") self._root = node diff --git a/mode/utils/types/graphs.py b/mode/utils/types/graphs.py index 480ddda1..37feb882 100644 --- a/mode/utils/types/graphs.py +++ b/mode/utils/types/graphs.py @@ -1,12 +1,19 @@ """Type classes for :mod:`mode.utils.graphs`.""" import abc from typing import ( - Any, Generic, IO, Iterable, Mapping, MutableMapping, Sequence, TypeVar, + IO, + Any, + Generic, + Iterable, + Mapping, + MutableMapping, + Sequence, + TypeVar, ) -__all__ = ['GraphFormatterT', 'DependencyGraphT'] +__all__ = ["GraphFormatterT", "DependencyGraphT"] -_T = TypeVar('_T') +_T = TypeVar("_T") class GraphFormatterT(Generic[_T]): @@ -19,13 +26,15 @@ class GraphFormatterT(Generic[_T]): graph_scheme: Mapping[str, Any] @abc.abstractmethod - def __init__(self, - root: Any = None, - type: str = None, - id: str = None, - indent: int = 0, - inw: str = ' ' * 4, - **scheme: Any) -> None: + def __init__( + self, + root: Any = None, + type: str = None, + id: str = None, + indent: int = 0, + inw: str = " " * 4, + **scheme: Any + ) -> None: ... @abc.abstractmethod @@ -65,15 +74,13 @@ def FMT(self, fmt: str, *args: Any, **kwargs: Any) -> str: ... @abc.abstractmethod - def draw_edge(self, a: _T, b: _T, - scheme: Mapping = None, - attrs: Mapping = None) -> str: + def draw_edge( + self, a: _T, b: _T, scheme: Mapping = None, attrs: Mapping = None + ) -> str: ... @abc.abstractmethod - def draw_node(self, obj: _T, - scheme: Mapping = None, - attrs: Mapping = None) -> str: + def draw_node(self, obj: _T, scheme: Mapping = None, attrs: Mapping = None) -> str: ... 
@@ -83,9 +90,9 @@ class DependencyGraphT(Generic[_T], Mapping[_T, _T]): adjacent: MutableMapping[_T, _T] @abc.abstractmethod - def __init__(self, - it: Iterable[_T] = None, - formatter: GraphFormatterT[_T] = None) -> None: + def __init__( + self, it: Iterable[_T] = None, formatter: GraphFormatterT[_T] = None + ) -> None: ... @abc.abstractmethod @@ -97,7 +104,7 @@ def add_edge(self, A: _T, B: _T) -> None: ... @abc.abstractmethod - def connect(self, graph: 'DependencyGraphT') -> None: + def connect(self, graph: "DependencyGraphT") -> None: ... @abc.abstractmethod diff --git a/mode/utils/types/trees.py b/mode/utils/types/trees.py index 2df13f7b..da08edef 100644 --- a/mode/utils/types/trees.py +++ b/mode/utils/types/trees.py @@ -1,11 +1,12 @@ """Type classes for :mod:`mode.utils.trees`.""" import abc from typing import Any, Generic, Iterator, List, Optional, TypeVar, Union + from .graphs import DependencyGraphT -__all__ = ['NodeT'] +__all__ = ["NodeT"] -_T = TypeVar('_T') +_T = TypeVar("_T") class NodeT(Generic[_T]): @@ -16,19 +17,19 @@ class NodeT(Generic[_T]): @classmethod @abc.abstractmethod - def _new_node(cls, data: _T, **kwargs: Any) -> 'NodeT': + def _new_node(cls, data: _T, **kwargs: Any) -> "NodeT": ... @abc.abstractmethod - def new(self, data: _T) -> 'NodeT': + def new(self, data: _T) -> "NodeT": ... @abc.abstractmethod - def add(self, data: Union[_T, 'NodeT[_T]']) -> None: + def add(self, data: Union[_T, "NodeT[_T]"]) -> None: ... @abc.abstractmethod - def add_deduplicate(self, data: Union[_T, 'NodeT[_T]']) -> None: + def add_deduplicate(self, data: Union[_T, "NodeT[_T]"]) -> None: ... @abc.abstractmethod @@ -36,15 +37,15 @@ def discard(self, data: _T) -> None: ... @abc.abstractmethod - def reattach(self, parent: 'NodeT') -> 'NodeT': + def reattach(self, parent: "NodeT") -> "NodeT": ... @abc.abstractmethod - def traverse(self) -> Iterator['NodeT']: + def traverse(self) -> Iterator["NodeT"]: ... 
@abc.abstractmethod - def walk(self) -> Iterator['NodeT']: + def walk(self) -> Iterator["NodeT"]: ... @abc.abstractmethod @@ -52,25 +53,25 @@ def as_graph(self) -> DependencyGraphT: ... @abc.abstractmethod - def detach(self, parent: 'NodeT') -> 'NodeT': + def detach(self, parent: "NodeT") -> "NodeT": ... @property @abc.abstractmethod - def parent(self) -> Optional['NodeT']: + def parent(self) -> Optional["NodeT"]: ... @parent.setter - def parent(self, node: 'NodeT') -> None: + def parent(self, node: "NodeT") -> None: ... @property @abc.abstractmethod - def root(self) -> Optional['NodeT']: + def root(self) -> Optional["NodeT"]: ... @root.setter - def root(self, node: 'NodeT') -> None: + def root(self, node: "NodeT") -> None: ... @property diff --git a/mode/utils/typing.py b/mode/utils/typing.py index 375c5054..36b874d6 100644 --- a/mode/utils/typing.py +++ b/mode/utils/typing.py @@ -3,13 +3,13 @@ import typing __all__ = [ - 'AsyncContextManager', - 'AsyncGenerator', - 'ChainMap', - 'Counter', - 'Deque', - 'NoReturn', - 'Protocol', + "AsyncContextManager", + "AsyncGenerator", + "ChainMap", + "Counter", + "Deque", + "NoReturn", + "Protocol", ] if typing.TYPE_CHECKING: diff --git a/mode/worker.py b/mode/worker.py index 07dfbb1e..1d98ab6c 100644 --- a/mode/worker.py +++ b/mode/worker.py @@ -14,11 +14,11 @@ from contextlib import contextmanager, suppress from logging import Handler, Logger from typing import ( + IO, Any, Callable, ClassVar, Dict, - IO, Iterable, Iterator, List, @@ -39,38 +39,40 @@ if typing.TYPE_CHECKING: from .debug import BlockingDetector else: - class BlockingDetector: ... # noqa -__all__ = ['Worker'] + class BlockingDetector: + ... 
# noqa + + +__all__ = ["Worker"] logger = logging.get_logger(__name__) -EX_OK = getattr(os, 'EX_OK', 0) +EX_OK = getattr(os, "EX_OK", 0) EX_FAILURE = 1 -EX_OSERR = getattr(os, 'EX_OSERR', 71) -BLOCK_DETECTOR = 'mode.debug:BlockingDetector' +EX_OSERR = getattr(os, "EX_OSERR", 71) +BLOCK_DETECTOR = "mode.debug:BlockingDetector" class _TupleAsListRepr(reprlib.Repr): - def repr_tuple(self, x: Tuple, level: int) -> str: return self.repr_list(cast(list, x), level) + + # this repr formats tuples as if they are lists. _repr = _TupleAsListRepr().repr # noqa: E305 @contextmanager -def exiting(*, - print_exception: bool = False, - file: IO = sys.stderr) -> Iterator[None]: +def exiting(*, print_exception: bool = False, file: IO = sys.stderr) -> Iterator[None]: try: yield except MemoryError: - sys.stderr.write('Out of memory!') + sys.stderr.write("Out of memory!") sys.exit(EX_OSERR) except Exception as exc: if print_exception: - print(f'Command raised exception: {exc!r}', file=file) + print(f"Command raised exception: {exc!r}", file=file) traceback.print_tb(exc.__traceback__, file=file) sys.exit(EX_FAILURE) sys.exit(EX_OK) @@ -106,23 +108,25 @@ class Worker(Service): _signal_stop_future: Optional[asyncio.Future] = None def __init__( - self, *services: ServiceT, - debug: bool = False, - quiet: bool = False, - logging_config: Dict = None, - loglevel: Union[str, int] = None, - logfile: Union[str, IO] = None, - redirect_stdouts: bool = True, - redirect_stdouts_level: logging.Severity = None, - stdout: Optional[IO] = sys.stdout, - stderr: Optional[IO] = sys.stderr, - console_port: int = 50101, - loghandlers: List[Handler] = None, - blocking_timeout: Seconds = 10.0, - loop: asyncio.AbstractEventLoop = None, - override_logging: bool = True, - daemon: bool = True, - **kwargs: Any) -> None: + self, + *services: ServiceT, + debug: bool = False, + quiet: bool = False, + logging_config: Dict = None, + loglevel: Union[str, int] = None, + logfile: Union[str, IO] = None, + redirect_stdouts: 
bool = True, + redirect_stdouts_level: logging.Severity = None, + stdout: Optional[IO] = sys.stdout, + stderr: Optional[IO] = sys.stderr, + console_port: int = 50101, + loghandlers: List[Handler] = None, + blocking_timeout: Seconds = 10.0, + loop: asyncio.AbstractEventLoop = None, + override_logging: bool = True, + daemon: bool = True, + **kwargs: Any, + ) -> None: self.services = services self.debug = debug self.quiet = quiet @@ -132,7 +136,8 @@ def __init__( self.loghandlers = loghandlers or [] self.redirect_stdouts = redirect_stdouts self.redirect_stdouts_level = logging.level_number( - redirect_stdouts_level or 'WARN') + redirect_stdouts_level or "WARN" + ) self.override_logging = override_logging self.stdout = sys.stdout if stdout is None else stdout self.stderr = sys.stderr if stderr is None else stderr @@ -154,11 +159,9 @@ def carp(self, msg: str) -> None: """Write warning to standard err.""" self._say(msg, file=self.stderr) - def _say(self, - msg: str, - file: Optional[IO] = None, - end: str = '\n', - **kwargs: Any) -> None: + def _say( + self, msg: str, file: Optional[IO] = None, end: str = "\n", **kwargs: Any + ) -> None: if file is None: file = self.stdout if not self.quiet: @@ -192,10 +195,11 @@ def _setup_logging(self) -> None: ) except Exception as exc: try: - self.stderr.write(f'CANNOT SETUP LOGGING: {exc!r} from ') + self.stderr.write(f"CANNOT SETUP LOGGING: {exc!r} from ") import traceback + traceback.print_stack(file=self.stderr) - except Exception: + except Exception: # noqa: S110 pass raise self.on_setup_root_logger(_logging.root, _loglevel) @@ -203,12 +207,9 @@ def _setup_logging(self) -> None: self._redirect_stdouts() def _redirect_stdouts(self) -> None: - self.add_context( - logging.redirect_stdouts(severity=self.redirect_stdouts_level)) + self.add_context(logging.redirect_stdouts(severity=self.redirect_stdouts_level)) - def on_setup_root_logger(self, - logger: Logger, - level: int) -> None: + def on_setup_root_logger(self, logger: Logger, 
level: int) -> None: ... async def maybe_start_blockdetection(self) -> None: @@ -216,7 +217,7 @@ async def maybe_start_blockdetection(self) -> None: await self.blocking_detector.maybe_start() def install_signal_handlers(self) -> None: - if sys.platform == 'win32': + if sys.platform == "win32": self._install_signal_handlers_windows() else: self._install_signal_handlers_unix() @@ -231,7 +232,7 @@ def _install_signal_handlers_unix(self) -> None: self.loop.add_signal_handler(signal.SIGUSR2, self._on_sigusr2) def _on_sigint(self) -> None: - self.carp('-INT- -INT- -INT- -INT- -INT- -INT-') + self.carp("-INT- -INT- -INT- -INT- -INT- -INT-") self._schedule_shutdown(signal.SIGINT) def _on_sigterm(self) -> None: @@ -250,18 +251,20 @@ async def _cry(self) -> None: logging.cry(file=self.stderr) def _enter_debugger(self) -> None: - self.carp('Starting debugger...') - import pdb # noqa: T100 - pdb.set_trace() # noqa: T100 + self.carp("Starting debugger...") + import pdb # noqa: T100 + + pdb.set_trace() # noqa: T100 def _schedule_shutdown(self, signal: signal.Signals) -> None: if not self._signal_stop_time: self._signal_stop_time = self.loop.time() self._signal_stop_future = asyncio.ensure_future( - self._stop_on_signal(signal), loop=self.loop) + self._stop_on_signal(signal), loop=self.loop + ) async def _stop_on_signal(self, signal: signal.Signals) -> None: - self.log.info('Signal received: %s (%s)', signal, signal.value) + self.log.info("Signal received: %s (%s)", signal, signal.value) await self.stop() maybe_cancel(self._starting_fut) @@ -269,14 +272,17 @@ def execute_from_commandline(self) -> NoReturn: self._starting_fut = None with exiting(file=self.stderr): try: - self._starting_fut = asyncio.ensure_future(self.start()) + self._starting_fut = asyncio.ensure_future( + self.start(), + loop=self.loop, + ) self.loop.run_until_complete(self._starting_fut) except asyncio.CancelledError: pass except MemoryError: raise except Exception as exc: - self.log.exception('Error: %r', exc) 
+ self.log.exception("Error: %r", exc) raise finally: maybe_cancel(self._starting_fut) @@ -297,34 +303,35 @@ def stop_and_shutdown(self) -> None: def _shutdown_loop(self) -> None: # Gather futures created by us. - self.log.info('Gathering service tasks...') + self.log.info("Gathering service tasks...") with suppress(asyncio.CancelledError): self.loop.run_until_complete(self._gather_futures()) # Gather absolutely all asyncio futures. - self.log.info('Gathering all futures...') + self.log.info("Gathering all futures...") self._gather_all() try: # Wait until loop is fully stopped. while self.loop.is_running(): - self.log.info('Waiting for event loop to shutdown...') + self.log.info("Waiting for event loop to shutdown...") self.loop.stop() self.loop.run_until_complete(asyncio.sleep(1.0)) except BaseException as exc: - self.log.exception('Got exception while waiting: %r', exc) + self.log.exception("Got exception while waiting: %r", exc) finally: # Then close the loop. fut = asyncio.ensure_future(self._sentinel_task(), loop=self.loop) self.loop.run_until_complete(fut) self.loop.stop() - self.log.info('Closing event loop') + self.log.info("Closing event loop") self.loop.close() if self.crash_reason: self.log.critical( - 'We experienced a crash! Reraising original exception...') + "We experienced a crash! Reraising original exception..." 
+ ) raise self.crash_reason from self.crash_reason async def _sentinel_task(self) -> None: - await asyncio.sleep(1.0, loop=self.loop) + await asyncio.sleep(1.0) def _gather_all(self) -> None: # sleeps for at most 10 * 0.1s @@ -343,8 +350,7 @@ async def _add_monitor(self) -> Any: try: import aiomonitor except ImportError: - self.log.warning( - 'Cannot start console: aiomonitor is not installed') + self.log.warning("Cannot start console: aiomonitor is not installed") else: monitor = aiomonitor.start_monitor( port=self.console_port, diff --git a/requirements/default.txt b/requirements/default.txt index 8e4ac51f..a3ce5231 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -2,3 +2,4 @@ colorlog>=2.9.0 aiocontextvars>=0.2 ; python_version<'3.7' mypy_extensions typing_extensions; python_version<'3.8' +croniter>=0.3.16 diff --git a/requirements/docs.txt b/requirements/docs.txt index c0733508..435f583f 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,3 +1,4 @@ +six sphinx>=2.2,<3.0 sphinx_celery>=1.4.8 sphinx-autodoc-annotation diff --git a/requirements/flakes.txt b/requirements/flakes.txt index f52ffd1a..1063c4f5 100644 --- a/requirements/flakes.txt +++ b/requirements/flakes.txt @@ -1,15 +1,11 @@ -flake8>=2.5.4 -flake8-blind-except +flake8>=4.0.0 +flake8-bandit flake8-bugbear flake8-builtins-unleashed -flake8-class-newline -flake8-commas flake8-comprehensions flake8-debugger -flake8-import-order flake8-logging-format flake8-mock flake8-pep3101 flake8-pyi flake8-tuple -flake8-quotes diff --git a/requirements/test.txt b/requirements/test.txt index 35b0bd75..cf848121 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,4 +1,5 @@ hypothesis>=3.31 +freezegun>=0.3.11 pytest-aiofiles>=0.2.0 pytest-asyncio>=0.8 pytest-base-url>=1.4.1 @@ -6,3 +7,4 @@ pytest-forked pytest-openfiles>=0.2.0 pytest-random-order>=0.5.4 pytest>=5.4.0 +pytz diff --git a/requirements/typecheck.txt b/requirements/typecheck.txt index 9728708c..d2f32d16 
100644 --- a/requirements/typecheck.txt +++ b/requirements/typecheck.txt @@ -1,2 +1,2 @@ -mypy>=0.750 +mypy==0.782 yarl diff --git a/scripts/README.md b/scripts/README.md new file mode 100644 index 00000000..7388eac4 --- /dev/null +++ b/scripts/README.md @@ -0,0 +1,11 @@ +# Development Scripts + +* `scripts/install` - Install dependencies in a virtual environment. +* `scripts/tests` - Run the test suite. +* `scripts/lint` - Run the automated code linting/formatting tools. +* `scripts/check` - Run the code linting, checking that it passes. +* `scripts/coverage` - Check that code coverage is complete. +* `scripts/build` - Build source and wheel packages. +* `scripts/publish` - Publish the latest version to PyPI. + +Styled after GitHub's ["Scripts to Rule Them All"](https://github.com/github/scripts-to-rule-them-all). diff --git a/scripts/build b/scripts/build new file mode 100644 index 00000000..7d327b5c --- /dev/null +++ b/scripts/build @@ -0,0 +1,13 @@ +#!/bin/sh -e + +if [ -d 'venv' ] ; then + PREFIX="venv/bin/" +else + PREFIX="" +fi + +set -x + +${PREFIX}python setup.py sdist bdist_wheel +${PREFIX}twine check dist/* +# ${PREFIX}mkdocs build diff --git a/scripts/check b/scripts/check new file mode 100644 index 00000000..b66d6527 --- /dev/null +++ b/scripts/check @@ -0,0 +1,14 @@ +#!/bin/sh -e + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi +export SOURCE_FILES="mode t setup.py" + +set -x + +${PREFIX}isort --check --diff --project=mode $SOURCE_FILES +${PREFIX}black --check --diff $SOURCE_FILES +${PREFIX}flake8 $SOURCE_FILES +# ${PREFIX}mypy $SOURCE_FILES diff --git a/scripts/clean b/scripts/clean new file mode 100644 index 00000000..01e89479 --- /dev/null +++ b/scripts/clean @@ -0,0 +1,17 @@ +#!/bin/sh -e + +if [ -d 'dist' ] ; then + rm -r dist +fi +if [ -d 'site' ] ; then + rm -r site +fi +if [ -d 'htmlcov' ] ; then + rm -r htmlcov +fi +if [ -d 'mode_streaming.egg-info' ] ; then + rm -r mode_streaming.egg-info +fi + +rm -rf .coverage.* +rm -rf 
*.logs diff --git a/scripts/coverage b/scripts/coverage new file mode 100644 index 00000000..ba5d4dfd --- /dev/null +++ b/scripts/coverage @@ -0,0 +1,11 @@ +#!/bin/sh -e + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi + +set -x + +${PREFIX}coverage report --show-missing --skip-covered --fail-under=60 +codecov --token=$CODECOV_TOKEN diff --git a/scripts/docs b/scripts/docs new file mode 100644 index 00000000..4ac3beb7 --- /dev/null +++ b/scripts/docs @@ -0,0 +1,10 @@ +#!/bin/sh -e + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi + +set -x + +${PREFIX}mkdocs serve diff --git a/scripts/install b/scripts/install new file mode 100644 index 00000000..c0e202e8 --- /dev/null +++ b/scripts/install @@ -0,0 +1,24 @@ +#!/bin/sh -e + +# Use the Python executable provided from the `-p` option, or a default. +[ "$1" = "-p" ] && PYTHON=$2 || PYTHON="python3" + +REQUIREMENTS="requirements/test.txt" +VENV="venv" + +set -x + +if [ -z "$GITHUB_ACTIONS" ]; then + "$PYTHON" -m venv "$VENV" + PIP="$VENV/bin/pip" +else + PIP="pip" +fi + +if [ -n "$GITHUB_ACTIONS" ] && [ "x$USE_CYTHON" = "xtrue" ] ; then + $PIP install Cython + FAST="[fast]" +fi + +"$PIP" install -r "$REQUIREMENTS" +"$PIP" install -e .${FAST} diff --git a/scripts/lint b/scripts/lint new file mode 100644 index 00000000..d774c1f8 --- /dev/null +++ b/scripts/lint @@ -0,0 +1,13 @@ +#!/bin/sh -e + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi +export SOURCE_FILES="mode t setup.py" + +set -x + +${PREFIX}autoflake --in-place --recursive $SOURCE_FILES +${PREFIX}isort --project=mode $SOURCE_FILES +${PREFIX}black $SOURCE_FILES diff --git a/scripts/publish b/scripts/publish new file mode 100644 index 00000000..5e07c69b --- /dev/null +++ b/scripts/publish @@ -0,0 +1,19 @@ +#!/bin/sh -e + +VERSION_FILE="mode/__init__.py" + +if [ -d 'venv' ] ; then + PREFIX="venv/bin/" +else + PREFIX="" +fi + +VERSION=`grep __version__ ${VERSION_FILE} | grep -o 
'[0-9][^"]*'` + +set -x + +${PREFIX}twine upload dist/* +# ${PREFIX}mkdocs gh-deploy --force + +git tag -a v${VERSION} -m "release v${VERSION}" +git push origin v${VERSION} diff --git a/scripts/tests b/scripts/tests new file mode 100644 index 00000000..c95762d7 --- /dev/null +++ b/scripts/tests @@ -0,0 +1,19 @@ +#!/bin/sh + +export PREFIX="" +if [ -d 'venv' ] ; then + export PREFIX="venv/bin/" +fi + +set -ex + +if [ -z $GITHUB_ACTIONS ]; then + scripts/check +fi + +${PREFIX}pytest t $@ +${PREFIX}bandit -b extra/bandit/baseline.json -c extra/bandit/config.yaml -r mode + +if [ -z $GITHUB_ACTIONS ]; then + scripts/coverage +fi diff --git a/setup.cfg b/setup.cfg index c661bf05..47caec6e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [metadata] -name = mode +name = mode-streaming version = attr: mode.__version__ author = attr: mode.__author__ author_email = attr: mode.__contact__ @@ -38,11 +38,12 @@ universal = 1 [flake8] # classes can be lowercase, arguments and variables can be uppercase # whenever it makes the code more readable. 
-ignore = N806, N802, N801, N803, I100, I201, I202, B008, W504, G200 -inline-quotes = single -multiline-quotes = ''' -docstring-quotes = """ +ignore = N806, N802, N801, N803, I100, I201, I202, B008, W504, G200, S101, E203, E266, E501, W503 enable-extensions = G +max-line-length = 88 +per-file-ignores = + t/*: S301,S403 + #[pep257] #ignore = D102,D104,D203,D105,D213 @@ -81,3 +82,7 @@ warn_return_any = True warn_unreachable = True warn_unused_configs = True warn_unused_ignores = True + +[isort] +profile=black +known_first_party=mode diff --git a/setup.py b/setup.py index 6ea73134..34e2eb1a 100644 --- a/setup.py +++ b/setup.py @@ -3,25 +3,30 @@ import re import sys + try: import platform + _pyimp = platform.python_implementation except (AttributeError, ImportError): + def _pyimp(): - return 'Python' + return "Python" + + from setuptools import find_packages, setup -NAME = 'mode' -EXTENSIONS = {'eventlet', 'gevent', 'uvloop'} -E_UNSUPPORTED_PYTHON = '%s 1.0 requires %%s %%s or later!' % (NAME,) +NAME = "mode-streaming" +EXTENSIONS = {"eventlet", "gevent", "uvloop"} +E_UNSUPPORTED_PYTHON = "%s 1.0 requires %%s %%s or later!" 
% (NAME,) # noqa: S001 PYIMP = _pyimp() if sys.version_info < (3, 6): - raise Exception(E_UNSUPPORTED_PYTHON % (PYIMP, '3.6')) + raise Exception(E_UNSUPPORTED_PYTHON % (PYIMP, "3.6")) # noqa: S001 from pathlib import Path # noqa -README = Path('README.rst') +README = Path("README.rst") # -*- Classifiers -*- @@ -32,6 +37,8 @@ def _pyimp(): Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 Programming Language :: Python :: Implementation :: CPython Operating System :: POSIX Operating System :: Microsoft :: Windows @@ -41,11 +48,11 @@ def _pyimp(): Framework :: AsyncIO Intended Audience :: Developers """ -classifiers = [s.strip() for s in classes.split('\n') if s] +classifiers = [s.strip() for s in classes.split("\n") if s] # -*- Distribution Meta -*- -re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)') +re_meta = re.compile(r"__(\w+?)__\s*=\s*(.*)") re_doc = re.compile(r'^"""(.+?)"""') @@ -55,15 +62,15 @@ def add_default(m): def add_doc(m): - return (('doc', m.groups()[0]),) + return (("doc", m.groups()[0]),) pats = {re_meta: add_default, re_doc: add_doc} here = Path(__file__).parent.absolute() -with open(here / NAME / '__init__.py') as meta_fh: +with open(here / "mode" / "__init__.py") as meta_fh: meta = {} for line in meta_fh: - if line.strip() == '# -eof meta-': + if line.strip() == "# -eof meta-": break for pattern, handler in pats.items(): m = pattern.match(line.strip()) @@ -73,20 +80,20 @@ def add_doc(m): # -*- Installation Requires -*- -def strip_comments(l): - return l.split('#', 1)[0].strip() +def strip_comments(line): + return line.split("#", 1)[0].strip() def _pip_requirement(req): - if req.startswith('-r '): + if req.startswith("-r "): _, path = req.split() - return reqs(*path.split('/')) + return reqs(*path.split("/")) return [req] def _reqs(*f): - path = (Path.cwd() / 'requirements').joinpath(*f) - reqs = 
(strip_comments(l) for l in path.open().readlines()) + path = (Path.cwd() / "requirements").joinpath(*f) + reqs = (strip_comments(line) for line in path.open().readlines()) return [_pip_requirement(r) for r in reqs if r] @@ -96,49 +103,50 @@ def reqs(*f): def extras(*p): """Parse requirement in the requirements/extras/ directory.""" - return reqs('extras', *p) + return reqs("extras", *p) def extras_require(): """Get map of all extra requirements.""" - return {x: extras(x + '.txt') for x in EXTENSIONS} + return {x: extras(x + ".txt") for x in EXTENSIONS} # -*- Long Description -*- if README.exists(): - long_description = README.read_text(encoding='utf-8') + long_description = README.read_text(encoding="utf-8") else: - long_description = 'See http://pypi.org/project/{}'.format(NAME) + long_description = "See http://pypi.org/project/{}".format(NAME) # -*- %%% -*- packages = find_packages( - exclude=['t', 't.*', 'docs', 'docs.*', 'examples', 'examples.*'], + exclude=["t", "t.*", "docs", "docs.*", "examples", "examples.*"], ) -assert not any(package.startswith('t.') for package in packages) +assert not any(package.startswith("t.") for package in packages) setup( name=NAME, - version=meta['version'], - description=meta['doc'], - author=meta['author'], - author_email=meta['contact'], - url=meta['homepage'], - platforms=['any'], - license='BSD', - keywords='asyncio service bootsteps graph coroutine', + version=meta["version"], + description=meta["doc"], + author=meta["author"], + author_email=meta["contact"], + url=meta["homepage"], + platforms=["any"], + license="BSD", + keywords="asyncio service bootsteps graph coroutine", packages=packages, include_package_data=True, # PEP-561: https://www.python.org/dev/peps/pep-0561/ - package_data={'mode': ['py.typed']}, + package_data={"mode": ["py.typed"]}, zip_safe=False, - install_requires=reqs('default.txt'), - tests_require=reqs('test.txt'), + install_requires=reqs("default.txt"), + tests_require=reqs("test.txt"), 
extras_require=extras_require(), - python_requires='~=3.6', + python_requires="~=3.6", classifiers=classifiers, long_description=long_description, + long_description_content_type="text/x-rst", ) diff --git a/t/functional/test_mode.py b/t/functional/test_mode.py index e59b662d..4943e5ae 100644 --- a/t/functional/test_mode.py +++ b/t/functional/test_mode.py @@ -3,4 +3,4 @@ def test_dir(): assert dir(mode) - assert '__version__' in dir(mode) + assert "__version__" in dir(mode) diff --git a/t/functional/test_proxy.py b/t/functional/test_proxy.py index 3b9e0696..8c1e0589 100644 --- a/t/functional/test_proxy.py +++ b/t/functional/test_proxy.py @@ -1,11 +1,11 @@ import pytest + from mode import Service, label, shortlabel from mode.proxy import ServiceProxy from mode.utils.mocks import AsyncMock, MagicMock, Mock class Proxy(ServiceProxy): - def __init__(self, service, *args, **kwargs): self._proxied_service = service super().__init__(*args, **kwargs) @@ -16,11 +16,10 @@ def _service(self): class test_Proxy: - @pytest.fixture def service(self): s = Mock( - name='service', + name="service", autospec=Service, add_runtime_dependency=AsyncMock(), add_async_context=AsyncMock(), @@ -35,7 +34,7 @@ def service(self): @pytest.fixture def subservice(self): - return Mock(name='subservice') + return Mock(name="subservice") @pytest.fixture def proxy(self, *, service): @@ -130,12 +129,12 @@ def test_state(self, *, proxy, service): assert proxy.state is service.state def test_label(self, *, proxy): - assert label(proxy) == 'Proxy' + assert label(proxy) == "Proxy" def test_shortlabel(self, *, proxy): - assert shortlabel(proxy) == 'Proxy' + assert shortlabel(proxy) == "Proxy" def test_beacon(self, *, proxy, service): assert proxy.beacon is service.beacon - new_beacon = proxy.beacon = Mock(name='new_beacon') + new_beacon = proxy.beacon = Mock(name="new_beacon") assert service.beacon is new_beacon diff --git a/t/functional/test_service.py b/t/functional/test_service.py index 
85870b57..0f61ea7c 100644 --- a/t/functional/test_service.py +++ b/t/functional/test_service.py @@ -1,11 +1,13 @@ import asyncio import logging from typing import ContextManager + +import pytest + +import mode from mode.utils.locks import Event from mode.utils.mocks import Mock from mode.utils.typing import AsyncContextManager -import mode -import pytest class X(mode.Service): @@ -148,7 +150,7 @@ async def crash(service, exc): async def test_crash_leaf(): async with Complex() as service: error = None - error = await crash(service.y.z, KeyError('foo')) + error = await crash(service.y.z, KeyError("foo")) # crash propagates up chain assert service.y.z.x.crash_reason is error @@ -161,7 +163,7 @@ async def test_crash_leaf(): @pytest.mark.asyncio async def test_crash_middle(): async with Complex() as service: - error = await crash(service.y, KeyError('foo')) + error = await crash(service.y, KeyError("foo")) assert service.y.z.x.crash_reason is error assert service.y.z.crash_reason is error assert service.y.crash_reason is error @@ -172,7 +174,7 @@ async def test_crash_middle(): @pytest.mark.asyncio async def test_crash_head(): async with Complex() as service: - error = await crash(service, KeyError('foo')) + error = await crash(service, KeyError("foo")) assert service.y.z.x.crash_reason is error assert service.y.z.crash_reason is error assert service.y.crash_reason is error @@ -237,7 +239,7 @@ async def sleeper(): async def crasher(): await asyncio.sleep(0.1) try: - raise RuntimeError('foo') + raise RuntimeError("foo") except RuntimeError as exc: await service.crash(exc) @@ -280,24 +282,27 @@ class MundaneLogsDefault(mode.Service): class MundaneLogsDebug(mode.Service): - mundane_level = 'debug' + mundane_level = "debug" @pytest.mark.asyncio -@pytest.mark.parametrize('service_cls,expected_level', [ - (MundaneLogsDefault, logging.INFO), - (MundaneLogsDebug, logging.DEBUG), -]) +@pytest.mark.parametrize( + "service_cls,expected_level", + [ + (MundaneLogsDefault, 
logging.INFO), + (MundaneLogsDebug, logging.DEBUG), + ], +) async def test_mundane_level__default(service_cls, expected_level): service = service_cls() await assert_mundane_level_is(expected_level, service) async def assert_mundane_level_is(level: int, service: mode.ServiceT) -> None: - logger = service.log = Mock(name='service.log') + logger = service.log = Mock(name="service.log") async with service: ... - severity = _find_logging_call_severity(logger.log, 'Starting...') + severity = _find_logging_call_severity(logger.log, "Starting...") assert severity == level diff --git a/t/functional/test_signals.py b/t/functional/test_signals.py index bc41de0b..b7756ca8 100644 --- a/t/functional/test_signals.py +++ b/t/functional/test_signals.py @@ -1,9 +1,11 @@ from typing import Any from weakref import ref + +import pytest + from mode import label from mode.signals import Signal, SignalT, SyncSignal, SyncSignalT from mode.utils.mocks import Mock -import pytest class X: @@ -83,19 +85,19 @@ def test_sync_signals(): @x.on_stopped.connect def my_on_stopped(self, code: int, reason: str, **kwargs: Any) -> None: - assert kwargs['signal'] == x.on_stopped + assert kwargs["signal"] == x.on_stopped on_stopped_mock(self, code, reason) @x.on_started.connect def my_on_started(self, **kwargs: Any) -> None: - assert kwargs['signal'] == x.on_started + assert kwargs["signal"] == x.on_started on_started_mock(self) x.on_started.send() on_started_mock.assert_called_once_with(x) - x.on_stopped.send(303, 'sorry not sorry') - on_stopped_mock.assert_called_once_with(x, 303, 'sorry not sorry') + x.on_stopped.send(303, "sorry not sorry") + on_stopped_mock.assert_called_once_with(x, 303, "sorry not sorry") assert on_started_mock.call_count == 1 assert x.on_started.ident @@ -127,12 +129,12 @@ def my_on_started(self, **kwargs: Any) -> None: sig4 = sig3.with_default_sender(new_sender2) assert sig4.default_sender == new_sender2 - sig4.name = '' - sig4.__set_name__(sig3, 'foo') - assert sig4.name == 'foo' 
+ sig4.name = "" + sig4.__set_name__(sig3, "foo") + assert sig4.name == "foo" assert sig4.owner == sig3 - sig4.__set_name__(sig2, 'bar') - assert sig4.name == 'foo' + sig4.__set_name__(sig2, "bar") + assert sig4.name == "foo" assert sig4.owner == sig2 sig4.default_sender = None @@ -158,14 +160,13 @@ class X: sig = Signal() sig2 = SyncSignal() - assert X.sig.name == 'sig' + assert X.sig.name == "sig" assert X.sig.owner is X - assert X.sig2.name == 'sig2' + assert X.sig2.name == "sig2" assert X.sig2.owner is X class test_BaseSignal: - @pytest.fixture() def sig(self): return Signal() @@ -224,7 +225,6 @@ def is_alive(x): return receivers, alive_refs, dead_refs def test__is_alive(self, sig): - class Object: value = None @@ -234,9 +234,7 @@ class Object: assert sig._is_alive(ref(x)) == (True, x) def test_create_ref_methods(self, sig): - class X: - def foo(self, **kwargs): return 42 diff --git a/t/functional/test_supervisors.py b/t/functional/test_supervisors.py index e9f51d89..d0529420 100644 --- a/t/functional/test_supervisors.py +++ b/t/functional/test_supervisors.py @@ -1,5 +1,7 @@ import asyncio + import pytest + from mode import Service from mode.supervisors import ( ForfeitOneForAllSupervisor, diff --git a/t/functional/test_timers.py b/t/functional/test_timers.py index e88375d2..ccb744c5 100644 --- a/t/functional/test_timers.py +++ b/t/functional/test_timers.py @@ -1,8 +1,10 @@ import asyncio -import pytest from functools import reduce from itertools import chain from typing import List, NamedTuple, Tuple + +import pytest + from mode.timers import Timer from mode.utils.aiter import aslice from mode.utils.contexts import asynccontextmanager @@ -59,7 +61,7 @@ def sleep(self): def timer(self, *, clock, sleep) -> Timer: return Timer( self.interval, - name='test', + name="test", clock=clock, sleep=sleep, ) @@ -73,18 +75,22 @@ async def test_too_early(self, *, clock, timer, first_interval): interval = self.interval skew = self.skew intervals = [ - first_interval, # 1st 
interval - (None, None), # 2nd interval - (None, None), # 3rd interval - (interval - skew, None), # 4th interval: sleep too short - (None, interval + skew), # 5th interval: overlaps - (None, None), # 6th interval + first_interval, # 1st interval + (None, None), # 2nd interval + (None, None), # 3rd interval + (interval - skew, None), # 4th interval: sleep too short + (None, interval + skew), # 5th interval: overlaps + (None, None), # 6th interval ] async with self.assert_timer(timer, clock, intervals) as logger: logger.info.assert_called_once_with( - 'Timer %s woke up too early, with a drift ' - 'of -%r runtime=%r sleeptime=%r', - 'test', ANY, ANY, ANY) + "Timer %s woke up too early, with a drift " + "of -%r runtime=%r sleeptime=%r", + "test", + ANY, + ANY, + ANY, + ) assert timer.drifting == 1 assert timer.drifting_early == 1 assert not timer.drifting_late @@ -94,42 +100,45 @@ async def test_too_late(self, *, clock, timer, first_interval): interval = self.interval skew = self.skew intervals = [ - first_interval, # 1st interval - (None, None), # 2nd interval - (None, None), # 3rd interval - (interval + skew, None), # 4th interval: sleep too long - (None, interval + skew), # 5th interval: overlaps - (None, None), # 6th interval + first_interval, # 1st interval + (None, None), # 2nd interval + (None, None), # 3rd interval + (interval + skew, None), # 4th interval: sleep too long + (None, interval + skew), # 5th interval: overlaps + (None, None), # 6th interval ] async with self.assert_timer(timer, clock, intervals) as logger: logger.info.assert_called_once_with( - 'Timer %s woke up too late, with a drift ' - 'of +%r runtime=%r sleeptime=%r', - 'test', ANY, ANY, ANY) + "Timer %s woke up too late, with a drift " + "of +%r runtime=%r sleeptime=%r", + "test", + ANY, + ANY, + ANY, + ) assert timer.drifting == 1 assert timer.drifting_late == 1 assert not timer.drifting_early @asynccontextmanager - async def assert_timer(self, - timer, - clock, - interval_tuples): + async 
def assert_timer(self, timer, clock, interval_tuples): intervals = self.build_intervals(timer, *interval_tuples) print(intervals) clock_values = self.to_clock_values(*intervals) assert len(clock_values) == len(intervals) * 2 clock.side_effect = clock_values - with patch('mode.timers.logger') as logger: + with patch("mode.timers.logger") as logger: await self.assert_intervals(timer, intervals) yield logger - def new_interval(self, - interval: float = None, - wakeup_time: float = None, - yield_time: float = None, - expected_new_interval: float = None) -> Interval: + def new_interval( + self, + interval: float = None, + wakeup_time: float = None, + yield_time: float = None, + expected_new_interval: float = None, + ) -> Interval: if interval is None: interval = self.interval if wakeup_time is None: @@ -145,12 +154,14 @@ def new_interval(self, expected_new_interval, ) - def to_next_interval(self, - timer: Timer, - interval: Interval, - sleep_time: float = None, - yield_time: float = 0.1, - expected_new_interval: float = None) -> Interval: + def to_next_interval( + self, + timer: Timer, + interval: Interval, + sleep_time: float = None, + yield_time: float = 0.1, + expected_new_interval: float = None, + ) -> Interval: if sleep_time is None: sleep_time = interval.interval + 0.001 if yield_time is None: @@ -168,22 +179,18 @@ def to_next_interval(self, expected_new_interval=expected_new_interval, ) - def interval_to_clock_sequence(self, - interval: Interval) -> List[float]: + def interval_to_clock_sequence(self, interval: Interval) -> List[float]: # Timer calls clock() twice per iteration, # so in an interval this provides the clock for wakeup time # and the yield time. 
return [interval.wakeup_time, interval.yield_time] def to_clock_values(self, *intervals: Interval) -> List[float]: - return list(chain(*map( - self.interval_to_clock_sequence, intervals))) + return list(chain(*map(self.interval_to_clock_sequence, intervals))) def build_intervals( - self, - timer: Timer, - first_interval: Interval, - *values: Tuple[float, float]) -> List[Interval]: + self, timer: Timer, first_interval: Interval, *values: Tuple[float, float] + ) -> List[Interval]: """Build intervals from tuples of ``(sleep_time, yield_time)``. If a tuple is missing (is None), then default values @@ -192,8 +199,9 @@ def build_intervals( intervals = [first_interval] - def on_reduce(previous_interval: Interval, - tup: Tuple[float, float]) -> Interval: + def on_reduce( + previous_interval: Interval, tup: Tuple[float, float] + ) -> Interval: sleep_time, yield_time = tup next_interval = self.to_next_interval( timer, @@ -207,12 +215,9 @@ def on_reduce(previous_interval: Interval, reduce(on_reduce, values, first_interval) return intervals - async def assert_intervals(self, - timer: Timer, - intervals: List[Interval]) -> None: + async def assert_intervals(self, timer: Timer, intervals: List[Interval]) -> None: assert await self.consume_timer(timer, limit=len(intervals)) == [ - interval.expected_new_interval - for interval in intervals + interval.expected_new_interval for interval in intervals ] async def consume_timer(self, timer: Timer, limit: int) -> List[float]: diff --git a/t/functional/utils/test_aiter.py b/t/functional/utils/test_aiter.py index 9443dd56..3e35b618 100644 --- a/t/functional/utils/test_aiter.py +++ b/t/functional/utils/test_aiter.py @@ -1,18 +1,11 @@ from typing import AsyncIterable -from mode.utils.aiter import ( - aenumerate, - aiter, - alist, - anext, - arange, - aslice, - chunks, -) + import pytest +from mode.utils.aiter import aenumerate, aiter, alist, anext, arange, aslice, chunks -class AIT(AsyncIterable): +class AIT(AsyncIterable): async def 
__aiter__(self): for i in range(10): yield i @@ -53,11 +46,14 @@ async def test_aslice(): @pytest.mark.asyncio -@pytest.mark.parametrize('range_n,n,expected', [ - (11, 2, [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]), - (11, 3, [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]), - (10, 2, [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]]), -]) +@pytest.mark.parametrize( + "range_n,n,expected", + [ + (11, 2, [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]), + (11, 3, [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]), + (10, 2, [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]]), + ], +) async def test_chunks(range_n, n, expected): _chunks = [] async for chunk in chunks(aiter(arange(range_n)), n): diff --git a/t/functional/utils/test_collections.py b/t/functional/utils/test_collections.py index 636f58b9..ac38d108 100644 --- a/t/functional/utils/test_collections.py +++ b/t/functional/utils/test_collections.py @@ -1,5 +1,7 @@ import pickle + import pytest + from mode.utils.collections import ( AttributeDictMixin, DictAttribute, @@ -15,65 +17,61 @@ class test_FastUserDict: - @pytest.fixture() def d(self): class X(FastUserDict): - def __init__(self): self.data = {} return X() def test_fromkeys(self, d): - x = d.fromkeys(['a', 'b', 'c'], value=3) - assert x == {'a': 3, 'b': 3, 'c': 3} + x = d.fromkeys(["a", "b", "c"], value=3) + assert x == {"a": 3, "b": 3, "c": 3} assert type(x) is type(d) def test__missing__(self, d): i = 0 class X(type(d)): - def __missing__(self, key): nonlocal i i += 1 - return 'default', i + return "default", i x = X() - assert x['foo'] == ('default', 1) - assert x['foo'] == ('default', 2) - assert x['bar'] == ('default', 3) - x['foo'] = 'moo' - assert x['foo'] == 'moo' + assert x["foo"] == ("default", 1) + assert x["foo"] == ("default", 2) + assert x["bar"] == ("default", 3) + x["foo"] = "moo" + assert x["foo"] == "moo" def test_repr(self, d): - d.update({'foo': 'bar', 'baz': 300.33}) + d.update({"foo": "bar", "baz": 300.33}) assert repr(d) == repr(d.data) def test_copy(self, 
d): - d.update({'foo': [1, 2, 3], 'bar': 'baz'}) + d.update({"foo": [1, 2, 3], "bar": "baz"}) e = d.copy() assert e == d assert e is not d - assert e['foo'] is d['foo'], 'shallow copy' + assert e["foo"] is d["foo"], "shallow copy" def test_setgetdel(self, d): with pytest.raises(KeyError): - d['foo'] - d['foo'] = 303 - assert d['foo'] == 303 - d['foo'] = 606 - assert d['foo'] == 606 - del(d['foo']) + d["foo"] + d["foo"] = 303 + assert d["foo"] == 303 + d["foo"] = 606 + assert d["foo"] == 606 + del d["foo"] with pytest.raises(KeyError): - d['foo'] + d["foo"] def test_missing(self): m = Mock() class X(FastUserDict): - def __init__(self): self.data = {} @@ -81,44 +79,44 @@ def __missing__(self, key): return m(key) x = X() - assert x['foo'] is m.return_value - assert x['foo'] is m.return_value + assert x["foo"] is m.return_value + assert x["foo"] is m.return_value assert m.call_count == 2 def test_get(self, d): sentinel = object() - assert d.get('foo', sentinel) is sentinel - d['foo'] = 303 - assert d.get('foo') == 303 + assert d.get("foo", sentinel) is sentinel + d["foo"] = 303 + assert d.get("foo") == 303 def test_len(self, d): assert not d - d['foo'] = 1 + d["foo"] = 1 assert len(d) == 1 def test_iter(self, d): d.update(a=1, b=2, c=3) - assert list(iter(d)) == ['a', 'b', 'c'] + assert list(iter(d)) == ["a", "b", "c"] def test_contains(self, d): - assert 'foo' not in d - d['foo'] = 1 - assert 'foo' in d + assert "foo" not in d + d["foo"] = 1 + assert "foo" in d def test_clear(self, d): d.update(a=1, b=2, c=3) - assert d['a'] == 1 - assert d['b'] == 2 - assert d['c'] == 3 + assert d["a"] == 1 + assert d["b"] == 2 + assert d["c"] == 3 assert len(d) == 3 d.clear() assert not d - for k in 'a', 'b', 'c': + for k in "a", "b", "c": with pytest.raises(KeyError): d[k] def test_keys_items_values(self, d): - src = {'a': 1, 'b': 2, 'c': 3} + src = {"a": 1, "b": 2, "c": 3} d.update(src) assert list(d.keys()) == list(src.keys()) assert list(d.items()) == list(src.items()) @@ -126,9 
+124,7 @@ def test_keys_items_values(self, d): class test_FastUserSet: - class X(FastUserSet): - def __init__(self): self.data = set() @@ -145,27 +141,27 @@ def test_reduce_ex(self, d): def test_pickle(self, d): d.update({1, 2, 3, 4, 5}) e = pickle.loads(pickle.dumps(d)) - assert isinstance(e, set), 'default reduce to built-in set' + assert isinstance(e, set), "default reduce to built-in set" assert d == e def test_setgetdel(self, d): - assert 'foo' not in d - d.add('foo') - assert 'foo' in d - d.discard('foo') - assert 'foo' not in d + assert "foo" not in d + d.add("foo") + assert "foo" in d + d.discard("foo") + assert "foo" not in d def test_len(self, d): assert not d - d.add('foo') + d.add("foo") assert len(d) == 1 - d.add('bar') + d.add("bar") assert len(d) == 2 def test_contains(self, d): - assert 'foo' not in d - d.add('foo') - assert 'foo' in d + assert "foo" not in d + d.add("foo") + assert "foo" in d def test_clear(self, d): d.update({1, 2, 3}) @@ -173,7 +169,7 @@ def test_clear(self, d): assert len(d) == 3 d.clear() assert not d - for k in 'a', 'b', 'c': + for k in "a", "b", "c": assert k not in d def test_and(self, d): @@ -267,8 +263,8 @@ def test_union(self, d): assert d.union({3, 4, 5}) == {1, 2, 3, 4, 5} def test_pop(self, d): - d.add('foo') - assert d.pop() == 'foo' + d.add("foo") + assert d.pop() == "foo" assert d == set() with pytest.raises(KeyError): d.pop() @@ -324,15 +320,14 @@ def test_remove(self, d): class test_ManagedUserDict: - def test_interface_on_key_get(self): - ManagedUserDict().on_key_get('k') + ManagedUserDict().on_key_get("k") def test_interface_on_key_set(self): - ManagedUserDict().on_key_set('k', 'v') + ManagedUserDict().on_key_set("k", "v") def test_interface_on_key_del(self): - ManagedUserDict().on_key_del('k') + ManagedUserDict().on_key_del("k") def test_interface_on_clear(self): ManagedUserDict().on_clear() @@ -340,7 +335,6 @@ def test_interface_on_clear(self): @pytest.fixture def d(self): class X(ManagedUserDict): - def 
__init__(self): self.key_get = Mock() self.key_set = Mock() @@ -364,34 +358,38 @@ def on_clear(self): def test_get_set_del(self, d): with pytest.raises(KeyError): - d['foo'] - d.key_get.assert_called_once_with('foo') - d['foo'] = 303 - d.key_set.assert_called_once_with('foo', 303) - assert d['foo'] == 303 + d["foo"] + d.key_get.assert_called_once_with("foo") + d["foo"] = 303 + d.key_set.assert_called_once_with("foo", 303) + assert d["foo"] == 303 assert d.key_get.call_count == 2 - del d['foo'] - d.key_del.assert_called_once_with('foo') + del d["foo"] + d.key_del.assert_called_once_with("foo") with pytest.raises(KeyError): - d['foo'] + d["foo"] assert d.key_get.call_count == 3 def test_update__args(self, d): - d.update({'a': 1, 'b': 2, 'c': 3}) - d.key_set.assert_has_calls([ - call('a', 1), - call('b', 2), - call('c', 3), - ]) + d.update({"a": 1, "b": 2, "c": 3}) + d.key_set.assert_has_calls( + [ + call("a", 1), + call("b", 2), + call("c", 3), + ] + ) def test_update__kwargs(self, d): d.update(a=1, b=2, c=3) - d.key_set.assert_has_calls([ - call('a', 1), - call('b', 2), - call('c', 3), - ]) + d.key_set.assert_has_calls( + [ + call("a", 1), + call("b", 2), + call("c", 3), + ] + ) def test_clear(self, d): d.update(a=1, b=2, c=3) @@ -402,16 +400,15 @@ def test_clear(self, d): def test_raw_update(self, d): d.raw_update(a=1, b=2) - assert d == {'a': 1, 'b': 2} + assert d == {"a": 1, "b": 2} class test_ManagedUserSet: - def test_interface_on_add(self): - ManagedUserSet().on_add('val') + ManagedUserSet().on_add("val") def test_interface_on_discard(self): - ManagedUserSet().on_discard('val') + ManagedUserSet().on_discard("val") def test_interface_on_clear(self): ManagedUserSet().on_clear() @@ -420,13 +417,12 @@ def test_interface_on_change(self): ManagedUserSet().on_change({1, 2}, {3, 4}) class ManagedSet(ManagedUserSet): - def __init__(self): self.data = set() - self.on_add_mock = Mock(name='on_add_mock') - self.on_discard_mock = Mock(name='on_discard_mock') - 
self.on_change_mock = Mock(name='on_change_mock') - self.on_clear_mock = Mock(name='on_clear') + self.on_add_mock = Mock(name="on_add_mock") + self.on_discard_mock = Mock(name="on_discard_mock") + self.on_change_mock = Mock(name="on_change_mock") + self.on_clear_mock = Mock(name="on_clear") def on_add(self, element): self.on_add_mock(element) @@ -445,30 +441,30 @@ def s(self): return self.ManagedSet() def test_add(self, *, s): - s.add('foo') - s.on_add_mock.assert_called_once_with('foo') - assert s == {'foo'} - s.add('foo') + s.add("foo") + s.on_add_mock.assert_called_once_with("foo") + assert s == {"foo"} + s.add("foo") assert s.on_add_mock.call_count == 1 def test_discard(self, *, s): - s.add('foo') - s.discard('foo') - s.on_discard_mock.assert_called_once_with('foo') - s.discard('foo') + s.add("foo") + s.discard("foo") + s.on_discard_mock.assert_called_once_with("foo") + s.discard("foo") assert s.on_discard_mock.call_count == 1 assert s == set() def test_clear(self, *, s): - s.raw_update({'foo', 'bar', 'baz'}) + s.raw_update({"foo", "bar", "baz"}) s.clear() s.on_clear_mock.assert_called_once_with() assert s == set() def test_pop(self, *, s): - s.add('foo') - assert s.pop() == 'foo' - s.on_discard_mock.assert_called_once_with('foo') + s.add("foo") + assert s.pop() == "foo" + s.on_discard_mock.assert_called_once_with("foo") assert s == set() with pytest.raises(KeyError): s.pop() @@ -536,7 +532,6 @@ def test_update(self, *, s): class test_LRUCache: - @pytest.fixture() def d(self): return LRUCache(limit=10) @@ -561,35 +556,35 @@ def test_get_set_update_pop(self, d): assert d.popitem() == (199, 199) def test_iter_keys_items_values(self, d): - d.update({'a': 1, 'b': 2, 'c': 3}) - assert list(iter(d)) == ['a', 'b', 'c'] + d.update({"a": 1, "b": 2, "c": 3}) + assert list(iter(d)) == ["a", "b", "c"] assert list(iter(d)) == list(d.keys()) assert list(d.values()) == [1, 2, 3] - assert list(d.items()) == [('a', 1), ('b', 2), ('c', 3)] + assert list(d.items()) == [("a", 1), 
("b", 2), ("c", 3)] def test_incr(self, d): - d['a'] = '0' - assert d.incr('a') == 1 - assert d.incr('a') == 2 + d["a"] = "0" + assert d.incr("a") == 1 + assert d.incr("a") == 2 def test__new_lock(self, d): d.thread_safety = True - with patch('threading.RLock') as RLock: + with patch("threading.RLock") as RLock: res = d._new_lock() assert res is RLock.return_value def test_pickle(self, d): - d.update({'a': 1, 'b': 2, 'c': 3}) + d.update({"a": 1, "b": 2, "c": 3}) e = pickle.loads(pickle.dumps(d)) assert e == d class test_AttributeDictMixin: - @pytest.fixture() def d(self): class X(dict, AttributeDictMixin): ... + return X() def test_set_get(self, *, d): @@ -597,36 +592,33 @@ def test_set_get(self, *, d): d.foo d.foo = 1 assert d.foo == 1 - assert d['foo'] == 1 + assert d["foo"] == 1 class test_DictAttribute: - @pytest.fixture() def d(self): - class Object: - def __init__(self, name): self.name = name - return DictAttribute(Object('foo')) + return DictAttribute(Object("foo")) def test_get_set(self, *, d): - assert d['name'] == 'foo' - assert d.name == 'foo' + assert d["name"] == "foo" + assert d.name == "foo" assert len(d) == 1 - d.name = 'bar' - d.setdefault('name', 'baz') - assert d.get('name') == 'bar' - assert d.get('foo') is None + d.name = "bar" + d.setdefault("name", "baz") + assert d.get("name") == "bar" + assert d.get("foo") is None - d.setdefault('foo', 'moo') - assert d.foo == 'moo' + d.setdefault("foo", "moo") + assert d.foo == "moo" assert len(d) == 2 with pytest.raises(NotImplementedError): - del d['foo'] + del d["foo"] assert list(d) == dir(d.obj) assert list(d._keys()) == dir(d.obj) @@ -635,22 +627,19 @@ def test_get_set(self, *, d): def test_force_mapping(): - class Object: - def __init__(self, name): self.name = name - obj = Object('foo') - obj._wrapped = Object('bar') - assert force_mapping(obj)['name'] == 'foo' + obj = Object("foo") + obj._wrapped = Object("bar") + assert force_mapping(obj)["name"] == "foo" - with 
patch('mode.utils.collections.LazyObject', Object): - assert force_mapping(obj)['name'] == 'bar' + with patch("mode.utils.collections.LazyObject", Object): + assert force_mapping(obj)["name"] == "bar" class test_Heap: - def test_type_generic(self): class H(Heap[int]): pass @@ -661,46 +650,46 @@ class H(Heap[int]): def test_heap(self): h = Heap() - h.push((300, 'foo')) + h.push((300, "foo")) assert len(h) == 1 - assert h[0] == (300, 'foo') - h.push((800, 'bar')) + assert h[0] == (300, "foo") + h.push((800, "bar")) assert len(h) == 2 - assert h[0] == (300, 'foo') - assert h.pop() == (300, 'foo') + assert h[0] == (300, "foo") + assert h.pop() == (300, "foo") assert len(h) == 1 - assert h[0] == (800, 'bar') - assert h.pushpop((100, 'baz')) == (100, 'baz') + assert h[0] == (800, "bar") + assert h.pushpop((100, "baz")) == (100, "baz") assert len(h) == 1 - assert h[0] == (800, 'bar') - h.push((300, 'foo')) + assert h[0] == (800, "bar") + h.push((300, "foo")) assert len(h) == 2 - assert h.replace((400, 'xuzzy')) == (300, 'foo') + assert h.replace((400, "xuzzy")) == (300, "foo") assert len(h) == 2 - assert h[0] == (400, 'xuzzy') - h.push((300, 'foo')) + assert h[0] == (400, "xuzzy") + h.push((300, "foo")) assert len(h) == 3 - assert h[0] == (300, 'foo') + assert h[0] == (300, "foo") - assert h.nsmallest(2) == [(300, 'foo'), (400, 'xuzzy')] - assert h.nlargest(2) == [(800, 'bar'), (400, 'xuzzy')] + assert h.nsmallest(2) == [(300, "foo"), (400, "xuzzy")] + assert h.nlargest(2) == [(800, "bar"), (400, "xuzzy")] assert str(h) assert repr(h) - h.insert(0, (999, 'misplaced')) - assert h[0] == (999, 'misplaced') + h.insert(0, (999, "misplaced")) + assert h[0] == (999, "misplaced") - h[0] = (888, 'misplaced') - assert h[0] == (888, 'misplaced') - del(h[0]) - assert h[0] == (300, 'foo') + h[0] = (888, "misplaced") + assert h[0] == (888, "misplaced") + del h[0] + assert h[0] == (300, "foo") - assert h.pop() == (300, 'foo') + assert h.pop() == (300, "foo") assert len(h) == 2 assert h 
- assert h.pop() == (400, 'xuzzy') + assert h.pop() == (400, "xuzzy") assert len(h) == 1 - assert h.pop() == (800, 'bar') + assert h.pop() == (800, "bar") assert not len(h) assert not h diff --git a/t/functional/utils/test_compat.py b/t/functional/utils/test_compat.py index bc7f1430..0c771842 100644 --- a/t/functional/utils/test_compat.py +++ b/t/functional/utils/test_compat.py @@ -1,20 +1,27 @@ import pytest + from mode.utils.compat import isatty, want_bytes, want_str from mode.utils.mocks import Mock -@pytest.mark.parametrize('input,expected', [ - ('foo', b'foo'), - (b'foo', b'foo'), -]) +@pytest.mark.parametrize( + "input,expected", + [ + ("foo", b"foo"), + (b"foo", b"foo"), + ], +) def test_want_bytes(input, expected): assert want_bytes(input) == expected -@pytest.mark.parametrize('input,expected', [ - (b'foo', 'foo'), - ('foo', 'foo'), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (b"foo", "foo"), + ("foo", "foo"), + ], +) def test_want_str(input, expected): assert want_str(input) == expected diff --git a/t/functional/utils/test_contexts.py b/t/functional/utils/test_contexts.py index bf4b1f3d..b31392d5 100644 --- a/t/functional/utils/test_contexts.py +++ b/t/functional/utils/test_contexts.py @@ -1,4 +1,5 @@ import pytest + from mode.utils.contexts import asyncnullcontext diff --git a/t/functional/utils/test_futures.py b/t/functional/utils/test_futures.py index 97b1776e..5b34e681 100644 --- a/t/functional/utils/test_futures.py +++ b/t/functional/utils/test_futures.py @@ -1,6 +1,8 @@ import asyncio import inspect + import pytest + from mode.utils.futures import ( StampedeWrapper, done_future, @@ -30,11 +32,13 @@ async def call_commit(x): @pytest.mark.asyncio async def test_stampede(): x = X() - assert all(r == 1 for r in await asyncio.gather(*[ - call_commit(x) for _ in range(100)])) + assert all( + r == 1 for r in await asyncio.gather(*[call_commit(x) for _ in range(100)]) + ) assert x.commit_count == 1 - assert all(r == 2 for r in await 
asyncio.gather(*[ - call_commit(x) for _ in range(100)])) + assert all( + r == 2 for r in await asyncio.gather(*[call_commit(x) for _ in range(100)]) + ) assert x.commit_count == 2 assert await x.commit() == 3 assert x.commit_count == 3 @@ -45,7 +49,7 @@ async def test_stampede(): X.commit() assert X.commit.__wrapped__ - assert 'self' in inspect.signature(X.commit).parameters + assert "self" in inspect.signature(X.commit).parameters @pytest.mark.asyncio @@ -55,18 +59,21 @@ async def test_done_future(): def callable(): - return 'sync' + return "sync" async def async_callable(): - return 'async' + return "async" @pytest.mark.asyncio -@pytest.mark.parametrize('input,expected', [ - (callable, 'sync'), - (async_callable, 'async'), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (callable, "sync"), + (async_callable, "async"), + ], +) async def test_maybe_async(input, expected): assert await maybe_async(input()) == expected @@ -80,7 +87,6 @@ async def test_maybe_cancel(*, loop): class test_StampedeWrapper: - @pytest.mark.asyncio async def test_concurrent(self): t = Mock() @@ -96,7 +102,8 @@ async def caller(): assert all( ret is t.return_value - for ret in await asyncio.gather(*[caller() for i in range(10)])) + for ret in await asyncio.gather(*[caller() for i in range(10)]) + ) t.assert_called_once_with() @@ -116,7 +123,6 @@ async def wrapped(): @pytest.mark.asyncio async def test_raises_cancel(self): - async def wrapped(): raise asyncio.CancelledError() @@ -127,14 +133,13 @@ async def wrapped(): @pytest.mark.asyncio async def test_already_done(self): - async def wrapped(): pass x = StampedeWrapper(wrapped) - x.fut = done_future('foo') + x.fut = done_future("foo") - assert await x() == 'foo' + assert await x() == "foo" @pytest.mark.asyncio diff --git a/t/functional/utils/test_locals.py b/t/functional/utils/test_locals.py index af4eb755..0530edb6 100644 --- a/t/functional/utils/test_locals.py +++ b/t/functional/utils/test_locals.py @@ -1,7 +1,9 @@ import asyncio 
-import pytest -import time import threading +import time + +import pytest + from mode.utils.locals import LocalStack @@ -12,7 +14,7 @@ def __init__(self, id: int) -> None: self.id = id def __repr__(self) -> str: - return f'<{type(self).__name__}: id={self.id!r}>' + return f"<{type(self).__name__}: id={self.id!r}>" def test_typing(): @@ -94,7 +96,7 @@ def test_stack_pop__when_empty_list(): @pytest.mark.asyncio -@pytest.mark.parametrize('retry', range(3)) +@pytest.mark.parametrize("retry", range(3)) async def test_threads(retry): stack = LocalStack() @@ -103,10 +105,7 @@ def thread_enter(): for _ in range(2): loop.run_until_complete(assert_stack(stack)) - threads = [ - threading.Thread(target=thread_enter, daemon=False) - for _ in range(10) - ] + threads = [threading.Thread(target=thread_enter, daemon=False) for _ in range(10)] for thread in threads: thread.start() for thread in threads: diff --git a/t/functional/utils/test_queues.py b/t/functional/utils/test_queues.py index 7370f34b..d44d8282 100644 --- a/t/functional/utils/test_queues.py +++ b/t/functional/utils/test_queues.py @@ -1,17 +1,14 @@ import asyncio from time import monotonic + import pytest + from mode.utils.futures import done_future from mode.utils.mocks import Mock -from mode.utils.queues import ( - FlowControlEvent, - FlowControlQueue, - ThrowableQueue, -) +from mode.utils.queues import FlowControlEvent, FlowControlQueue, ThrowableQueue class test_FlowControlEvent: - def test_constructor(self): assert not FlowControlEvent(initially_suspended=True).is_active() assert FlowControlEvent(initially_suspended=False).is_active() @@ -20,12 +17,11 @@ def test_loop__default(self): assert FlowControlEvent().loop is None def test_loop__custom(self): - loop = Mock(name='loop') + loop = Mock(name="loop") assert FlowControlEvent(loop=loop).loop is loop class test_FlowControlQueue: - @pytest.mark.asyncio async def test_suspend_resume(self): flow_control = FlowControlEvent() @@ -55,7 +51,7 @@ async def 
test_suspend_resume__clear_on_resume(self): time_now = monotonic() await queue.put(2) assert monotonic() - time_now > 0.1 - await queue.get() == 2 + assert await queue.get() == 2 @pytest.mark.asyncio async def test_suspend_resume__initially_suspended(self): @@ -80,7 +76,6 @@ async def _resume_soon(self, timeout, flow_control): class test_ThrowableQueue: - @pytest.mark.asyncio async def test_get__throw_first_in_buffer(self): flow_control = FlowControlEvent(initially_suspended=False) @@ -92,12 +87,12 @@ async def test_get__throw_first_in_buffer(self): assert await queue.get() == 2 await queue.put(3) await queue.put(4) - await queue.throw(KeyError('foo')) + await queue.throw(KeyError("foo")) with pytest.raises(KeyError): await queue.get() assert await queue.get() == 3 assert await queue.get() == 4 - await queue.throw(ValueError('bar')) + await queue.throw(ValueError("bar")) with pytest.raises(ValueError): await queue.get() queue.clear() @@ -113,12 +108,12 @@ async def test_get_nowait_throw_first_in_buffer(self): assert queue.get_nowait() == 2 await queue.put(3) await queue.put(4) - await queue.throw(KeyError('foo')) + await queue.throw(KeyError("foo")) with pytest.raises(KeyError): queue.get_nowait() assert queue.get_nowait() == 3 assert queue.get_nowait() == 4 - await queue.throw(ValueError('bar')) + await queue.throw(ValueError("bar")) with pytest.raises(ValueError): queue.get_nowait() queue.clear() diff --git a/t/functional/utils/test_text.py b/t/functional/utils/test_text.py index 70eed51b..a9766dd7 100644 --- a/t/functional/utils/test_text.py +++ b/t/functional/utils/test_text.py @@ -1,96 +1,136 @@ import pytest + from mode.utils import text -@pytest.mark.parametrize('input,expected', [ - ('the quick brown fox', 'The Quick Brown Fox'), - ('laZy-doG', 'Lazy Dog'), - ('laZy_DOG-of-t3-moo_era', 'Lazy Dog Of T3 Moo Era'), -]) +@pytest.mark.parametrize( + "input,expected", + [ + ("the quick brown fox", "The Quick Brown Fox"), + ("laZy-doG", "Lazy Dog"), + 
("laZy_DOG-of-t3-moo_era", "Lazy Dog Of T3 Moo Era"), + ], +) def test_title(input, expected): assert text.title(input) == expected -@pytest.mark.parametrize('choices,choice,expected', [ - (['foo', 'bar', 'baz'], 'boo', 'Did you mean foo?'), - (['foo', 'moo', 'bar'], 'boo', 'Did you mean one of foo, moo?'), - (['foo', 'moo', 'zoo'], 'boo', 'Did you mean one of foo, moo, zoo?'), - (['foo', 'bar', 'baz'], 'xxx', ''), -]) +@pytest.mark.parametrize( + "choices,choice,expected", + [ + (["foo", "bar", "baz"], "boo", "Did you mean foo?"), + (["foo", "moo", "bar"], "boo", "Did you mean one of foo, moo?"), + (["foo", "moo", "zoo"], "boo", "Did you mean one of foo, moo, zoo?"), + (["foo", "bar", "baz"], "xxx", ""), + ], +) def test_didyoumean(choices, choice, expected): assert text.didyoumean(choices, choice) == expected -@pytest.mark.parametrize('s,max,suffix,words,expected', [ - ('The quick brown hippopotamus jumped over the funny dog', - 27, '...', False, - 'The quick brown...'), - ('The quick brown hippopotamus jumped over the funny dog', - 27, '...', True, - 'The quick brown hippopot...'), - ('The quick brown hippopotamus jumped over the funny dog', - 1127, '...', False, - 'The quick brown hippopotamus jumped over the funny dog'), - ('The quick brown hippopotamus jumped over the funny dog', - 1127, '...', True, - 'The quick brown hippopotamus jumped over the funny dog'), -]) +@pytest.mark.parametrize( + "s,max,suffix,words,expected", + [ + ( + "The quick brown hippopotamus jumped over the funny dog", + 27, + "...", + False, + "The quick brown...", + ), + ( + "The quick brown hippopotamus jumped over the funny dog", + 27, + "...", + True, + "The quick brown hippopot...", + ), + ( + "The quick brown hippopotamus jumped over the funny dog", + 1127, + "...", + False, + "The quick brown hippopotamus jumped over the funny dog", + ), + ( + "The quick brown hippopotamus jumped over the funny dog", + 1127, + "...", + True, + "The quick brown hippopotamus jumped over the funny 
dog", + ), + ], +) def test_abbr(s, max, suffix, words, expected): assert text.abbr(s, max, suffix=suffix, words=words) == expected -@pytest.mark.parametrize('choices,choice,expected', [ - (['foo', 'fop', 'mop'], 'moo', 'mop'), - (['xab', 'mup', 'sop'], 'yyy', None), -]) +@pytest.mark.parametrize( + "choices,choice,expected", + [ + (["foo", "fop", "mop"], "moo", "mop"), + (["xab", "mup", "sop"], "yyy", None), + ], +) def test_fuzzymatch_best(choices, choice, expected): assert text.fuzzymatch_best(choices, choice) == expected -@pytest.mark.parametrize('origin,name,prefix,expected', [ - ('examples.simple', - 'examples.simple.Withdrawal', - '[...]', - '[...]Withdrawal'), - ('examples.other', - 'examples.simple.Withdrawal', - '[...]', - 'examples.simple.Withdrawal'), -]) +@pytest.mark.parametrize( + "origin,name,prefix,expected", + [ + ("examples.simple", "examples.simple.Withdrawal", "[...]", "[...]Withdrawal"), + ( + "examples.other", + "examples.simple.Withdrawal", + "[...]", + "examples.simple.Withdrawal", + ), + ], +) def test_abbr_fqdn(origin, name, prefix, expected): assert text.abbr_fqdn(origin, name, prefix=prefix) == expected -@pytest.mark.parametrize('n,s,suffix,expected', [ - (-2, 'argument', 's', 'arguments'), - (-1, 'argument', 's', 'arguments'), - (-0, 'argument', 's', 'arguments'), - (0, 'argument', 's', 'arguments'), - (1, 'argument', 's', 'argument'), - (2, 'argument', 's', 'arguments'), -]) +@pytest.mark.parametrize( + "n,s,suffix,expected", + [ + (-2, "argument", "s", "arguments"), + (-1, "argument", "s", "arguments"), + (-0, "argument", "s", "arguments"), + (0, "argument", "s", "arguments"), + (1, "argument", "s", "argument"), + (2, "argument", "s", "arguments"), + ], +) def test_pluralize(n, s, suffix, expected): assert text.pluralize(n, s, suffix=suffix) == expected -@pytest.mark.parametrize('s,prefix,suffix,expected', [ - ('', 'b', 'c', 'bc'), - (None, 'b', 'c', None), - ('a', 'b', 'c', 'bac'), - ('a', '', 'c', 'ac'), - ('a', 'b', '', 'ba'), - 
-]) +@pytest.mark.parametrize( + "s,prefix,suffix,expected", + [ + ("", "b", "c", "bc"), + (None, "b", "c", None), + ("a", "b", "c", "bac"), + ("a", "", "c", "ac"), + ("a", "b", "", "ba"), + ], +) def test_maybecat(s, prefix, suffix, expected): assert text.maybecat(s, prefix=prefix, suffix=suffix) == expected -@pytest.mark.parametrize('s,expected', [ - ('faust.utils.transformators.frobster', - 'faust.utils.transform[.]frobster'), - ('foo.bar.baz', 'foo.bar.baz'), - ('foobarbazdeliciouslybubblyfluffychocolatebar', - 'foobarbazdeliciouslybubblyfluffychocolatebar'), -]) +@pytest.mark.parametrize( + "s,expected", + [ + ("faust.utils.transformators.frobster", "faust.utils.transform[.]frobster"), + ("foo.bar.baz", "foo.bar.baz"), + ( + "foobarbazdeliciouslybubblyfluffychocolatebar", + "foobarbazdeliciouslybubblyfluffychocolatebar", + ), + ], +) def test_shorten_fqdn(s, expected): assert text.shorten_fqdn(s) == expected diff --git a/t/functional/utils/test_times.py b/t/functional/utils/test_times.py index 5f88e2d3..709f744d 100644 --- a/t/functional/utils/test_times.py +++ b/t/functional/utils/test_times.py @@ -1,10 +1,12 @@ import asyncio from datetime import timedelta from time import monotonic + import pytest + from mode.utils.times import ( - Bucket, TIME_MONOTONIC, + Bucket, TokenBucket, humanize_seconds, rate, @@ -13,32 +15,38 @@ ) -@pytest.mark.parametrize('input,expected', [ - (1.234, 1.234), - ('1.234', 1.234), - ('10/s', 10.0), - ('10/m', 0.16666666666666666), - ('8.3/m', 0.13833333333333334), - ('100/h', 0.02777777777777778), - ('1333/d', 0.01542824074074074), - (1, 1), - (timedelta(seconds=1.234), 1.234), - (None, None), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (1.234, 1.234), + ("1.234", 1.234), + ("10/s", 10.0), + ("10/m", 0.16666666666666666), + ("8.3/m", 0.13833333333333334), + ("100/h", 0.02777777777777778), + ("1333/d", 0.01542824074074074), + (1, 1), + (timedelta(seconds=1.234), 1.234), + (None, None), + ], +) def 
test_want_seconds(input, expected): assert want_seconds(input) == expected -@pytest.mark.parametrize('input,expected', [ - (10.1, 10.1), - ('1/s', 1.0), - ('10/s', 10.0), - ('10/m', pytest.approx(0.16666666666666666, 1e-1)), - ('10/h', pytest.approx(0.0027777777777777775, 1e-1)), - ('10/d', pytest.approx(0.00011574074074074073, 1e-1)), - (19, 19.0), - (None, 0.0), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (10.1, 10.1), + ("1/s", 1.0), + ("10/s", 10.0), + ("10/m", pytest.approx(0.16666666666666666, 1e-1)), + ("10/h", pytest.approx(0.0027777777777777775, 1e-1)), + ("10/d", pytest.approx(0.00011574074074074073, 1e-1)), + (19, 19.0), + (None, 0.0), + ], +) def test_rate(input, expected): assert rate(input) == expected @@ -90,29 +98,32 @@ async def test_rate_limit_raising(): pass -@pytest.mark.parametrize('seconds,microseconds,now,expected', [ - (4 * 60 * 60 * 24, False, None, '4.00 days'), - (1 * 60 * 60 * 24, False, None, '1.00 day'), - (4 * 60 * 60, False, None, '4.00 hours'), - (1 * 60 * 60, False, None, '1.00 hour'), - (4 * 60, False, None, '4.00 minutes'), - (1 * 60, False, None, '1.00 minute'), - (4, False, None, '4.00 seconds'), - (1, False, None, '1.00 second'), - (4.3567631221, False, None, '4.36 seconds'), - (4.3567631221, True, None, '4.36 seconds'), - (0.36, False, None, 'now'), - (0.36, False, '0 seconds', '0 seconds'), - (0.36, True, None, '0.36 seconds'), - (0, False, None, 'now'), - (0, False, None, 'now'), - (0, False, '0 seconds', '0 seconds'), -]) +@pytest.mark.parametrize( + "seconds,microseconds,now,expected", + [ + (4 * 60 * 60 * 24, False, None, "4.00 days"), + (1 * 60 * 60 * 24, False, None, "1.00 day"), + (4 * 60 * 60, False, None, "4.00 hours"), + (1 * 60 * 60, False, None, "1.00 hour"), + (4 * 60, False, None, "4.00 minutes"), + (1 * 60, False, None, "1.00 minute"), + (4, False, None, "4.00 seconds"), + (1, False, None, "1.00 second"), + (4.3567631221, False, None, "4.36 seconds"), + (4.3567631221, True, None, "4.36 seconds"), 
+ (0.36, False, None, "now"), + (0.36, False, "0 seconds", "0 seconds"), + (0.36, True, None, "0.36 seconds"), + (0, False, None, "now"), + (0, False, None, "now"), + (0, False, "0 seconds", "0 seconds"), + ], +) def test_humanize_seconds(seconds, expected, now, microseconds): - now = now or 'now' + now = now or "now" secs = humanize_seconds(seconds, microseconds=microseconds, now=now) assert secs == expected def test_humanize_seconds__prefix(): - assert humanize_seconds(4, prefix='about ') == 'about 4.00 seconds' + assert humanize_seconds(4, prefix="about ") == "about 4.00 seconds" diff --git a/t/functional/utils/test_tracebacks.py b/t/functional/utils/test_tracebacks.py index 4dc0a13c..f788d69f 100644 --- a/t/functional/utils/test_tracebacks.py +++ b/t/functional/utils/test_tracebacks.py @@ -1,5 +1,7 @@ import asyncio + import pytest + from mode.utils.mocks import Mock, patch from mode.utils.tracebacks import Traceback, format_task_stack @@ -58,17 +60,15 @@ def moo(): class test_Traceback: - @pytest.fixture() def frame(self): - return Mock(name='frame') + return Mock(name="frame") @pytest.fixture() def tb(self, *, frame): return Traceback(frame) def test_from_coroutine__async_generator_asend(self, tb): - class async_generator_asend: f_lineno = 303 f_lasti = None @@ -76,7 +76,6 @@ class async_generator_asend: assert Traceback.from_coroutine(async_generator_asend()) def test_from_coroutine__unknown(self, tb): - class foo_frame: f_lineno = 303 f_lasti = None @@ -98,6 +97,6 @@ class coro: cr_frame = None cr_await = Mock() - with patch('asyncio.iscoroutine') as iscoroutine: + with patch("asyncio.iscoroutine") as iscoroutine: iscoroutine.return_value = True Traceback.from_coroutine(coro(), limit=10) diff --git a/t/functional/utils/test_trees.py b/t/functional/utils/test_trees.py index 2a91b17b..3889d23a 100644 --- a/t/functional/utils/test_trees.py +++ b/t/functional/utils/test_trees.py @@ -1,4 +1,5 @@ import pytest + from mode.utils.trees import Node @@ -15,7 +16,7 @@ 
def test_Node(): node3 = node2.new(909) assert node3.parent is node2 assert node3.parent.parent is node - assert node3.path == '303/808/909' + assert node3.path == "303/808/909" assert repr(node3) node3.children.append(10) @@ -50,13 +51,16 @@ def test_Node(): assert node5 not in node4.children node.children.append(11) - assert str(node.as_graph()) == '''\ + assert ( + str(node.as_graph()) + == """\ 303(3) 808(1) 909(0) 11(0) 808(1) - 909(0)''' + 909(0)""" + ) with pytest.raises(ValueError): node.root = node # root node cannot be itself diff --git a/t/unit/test_debug.py b/t/unit/test_debug.py index 4526b382..c7deae18 100644 --- a/t/unit/test_debug.py +++ b/t/unit/test_debug.py @@ -1,13 +1,14 @@ import signal import sys + import pytest + from mode.debug import Blocking, BlockingDetector from mode.utils.mocks import AsyncMock, Mock, patch -@pytest.mark.skipif(sys.platform == 'win32', reason='win32: no SIGALRM') +@pytest.mark.skipif(sys.platform == "win32", reason="win32: no SIGALRM") class test_BlockingDetector: - @pytest.fixture() def block(self): return BlockingDetector(timeout=10.0) @@ -24,7 +25,7 @@ def on_sleep(*args, **kwargs): await block._deadman_switch(block) def test_reset_signal(self, block): - with patch('signal.signal') as sig: + with patch("signal.signal") as sig: block._arm = Mock() block._reset_signal() @@ -37,12 +38,12 @@ def test__clear_signal(self, block): block._arm.asssert_called_once_with(0) def test__arm(self, block): - with patch('mode.debug.arm_alarm') as arm_alarm: + with patch("mode.debug.arm_alarm") as arm_alarm: block._arm(1.11) arm_alarm.assert_called_once_with(1.11) def test__on_alarm(self, block): - with patch('traceback.format_stack'): + with patch("traceback.format_stack"): block._reset_signal = Mock() with pytest.raises(Blocking): block._on_alarm(30, Mock()) diff --git a/t/unit/test_locals.py b/t/unit/test_locals.py index 6f88cb69..04300890 100644 --- a/t/unit/test_locals.py +++ b/t/unit/test_locals.py @@ -1,7 +1,5 @@ import abc 
import sys -import pytest - from typing import ( AbstractSet, AsyncIterable, @@ -14,6 +12,8 @@ Sequence, ) +import pytest + from mode.locals import ( AsyncContextManagerProxy, AsyncGeneratorProxy, @@ -37,31 +37,30 @@ class test_Proxy: - def test_std_class_attributes(self): - assert Proxy.__name__ == 'Proxy' - assert Proxy.__module__ == 'mode.locals' + assert Proxy.__name__ == "Proxy" + assert Proxy.__module__ == "mode.locals" assert isinstance(Proxy.__doc__, str) def test_doc(self): def real(): pass - x = Proxy(real, __doc__='foo') - assert x.__doc__ == 'foo' - def test_name(self): + x = Proxy(real, __doc__="foo") + assert x.__doc__ == "foo" + def test_name(self): def real(): """real function""" - return 'REAL' + return "REAL" - x = Proxy(lambda: real, name='xyz') - assert x.__name__ == 'xyz' + x = Proxy(lambda: real, name="xyz") + assert x.__name__ == "xyz" y = Proxy(lambda: real) - assert y.__name__ == 'real' + assert y.__name__ == "real" - assert x.__doc__ == 'real function' + assert x.__doc__ == "real function" assert x.__class__ == type(real) assert x.__dict__ == real.__dict__ @@ -70,22 +69,20 @@ def real(): def test_get_current_local(self): x = Proxy(lambda: 10) - object.__setattr__(x, '_Proxy_local', Mock()) + object.__setattr__(x, "_Proxy_local", Mock()) assert x._get_current_object() def test_bool(self): - class X(object): - def __bool__(self): return False + __nonzero__ = __bool__ x = Proxy(lambda: X()) assert not x def test_slots(self): - class X(object): __slots__ = () @@ -94,24 +91,21 @@ class X(object): x.__dict__ def test_dir(self): - class X(object): - def __dir__(self): - return ['a', 'b', 'c'] + return ["a", "b", "c"] x = Proxy(lambda: X()) - assert dir(x) == ['a', 'b', 'c'] + assert dir(x) == ["a", "b", "c"] class Y(object): - def __dir__(self): raise RuntimeError() + y = Proxy(lambda: Y()) assert dir(y) == [] def test_qualname(self): - class X: ... 
@@ -119,45 +113,42 @@ class X: assert x.__qualname__ == X.__qualname__ def test_getsetdel_attr(self): - class X(object): a = 1 b = 2 c = 3 def __dir__(self): - return ['a', 'b', 'c'] + return ["a", "b", "c"] v = X() x = Proxy(lambda: v) - assert x.__members__ == ['a', 'b', 'c'] + assert x.__members__ == ["a", "b", "c"] assert x.a == 1 assert x.b == 2 assert x.c == 3 - setattr(x, 'a', 10) # noqa: B010 + setattr(x, "a", 10) # noqa: B010 assert x.a == 10 - del(x.a) + del x.a assert x.a == 1 def test_dictproxy(self): v = {} x = MutableMappingProxy(lambda: v) - x['foo'] = 42 - assert x['foo'] == 42 + x["foo"] = 42 + assert x["foo"] == 42 assert len(x) == 1 - assert 'foo' in x - del(x['foo']) + assert "foo" in x + del x["foo"] with pytest.raises(KeyError): - x['foo'] + x["foo"] assert iter(x) def test_complex_cast(self): - class Object(object): - def __complex__(self): return complex(10.333) @@ -165,9 +156,7 @@ def __complex__(self): assert o.__complex__() == complex(10.333) def test_index(self): - class Object(object): - def __index__(self): return 1 @@ -175,9 +164,7 @@ def __index__(self): assert o.__index__() == 1 def test_coerce(self): - class Object(object): - def __coerce__(self, other): return self, other @@ -185,25 +172,20 @@ def __coerce__(self, other): assert o.__coerce__(3) def test_hash(self): - class X(object): - def __hash__(self): return 1234 assert hash(Proxy(lambda: X())) == 1234 def test_call(self): - class X(object): - def __call__(self): return 1234 assert CallableProxy(lambda: X())() == 1234 def test_context(self): - class X(object): entered = exited = False @@ -223,7 +205,6 @@ def __exit__(self, *exc_info): @pytest.mark.asyncio async def test_async_context(self): - class X(object): entered = exited = False @@ -242,9 +223,7 @@ async def __aexit__(self, *exc_info): assert x.exited def test_reduce(self): - class X(object): - def __reduce__(self): return 123 @@ -253,9 +232,7 @@ def __reduce__(self): class test_Proxy__cached: - def 
test_only_evaluated_once(self): - class X(object): attr = 123 evals = 0 @@ -279,7 +256,6 @@ def test_maybe_evaluate(self): class test_MappingProxy: - @pytest.fixture() def orig(self): return {0: 1, 2: 3, 4: 5, 6: 7, 8: 9} @@ -333,9 +309,9 @@ def test_type(self, *, s): assert isinstance(s, MutableMapping) def test_setitem(self, *, s, orig): - s[0] = 'foo' - assert s[0] == 'foo' - assert orig[0] == 'foo' + s[0] = "foo" + assert s[0] == "foo" + assert orig[0] == "foo" def test_delitem(self, *, s, orig): del s[0] @@ -371,23 +347,22 @@ def test_setdefault(self, *, s, orig): assert orig[1] == 2 def test_update__kwargs(self, *, s, orig): - s.update(foo='foo') - assert s['foo'] == 'foo' - assert orig['foo'] == 'foo' + s.update(foo="foo") + assert s["foo"] == "foo" + assert orig["foo"] == "foo" def test_update__dict(self, *, s, orig): - s.update({'foo': 'foo'}) - assert s['foo'] == 'foo' - assert orig['foo'] == 'foo' + s.update({"foo": "foo"}) + assert s["foo"] == "foo" + assert orig["foo"] == "foo" def test_update__iterable(self, *, s, orig): - s.update([('foo', 'foo')]) - assert s['foo'] == 'foo' - assert orig['foo'] == 'foo' + s.update([("foo", "foo")]) + assert s["foo"] == "foo" + assert orig["foo"] == "foo" class test_SequenceProxy: - @pytest.fixture() def orig(self): return [1, 2, 3, 4, 5, 6, 7] @@ -485,7 +460,6 @@ def test_iadd(self, *, s, orig): class test_SetProxy: - @pytest.fixture() def orig(self): return {1, 2, 3, 4, 5, 6, 7} @@ -587,7 +561,6 @@ def test_ixor(self, *, s): class test_AwaitableProxy: - async def asynfun(self): return 10 @@ -606,7 +579,6 @@ async def test_awaitable(self, *, s): class test_AsyncIterableProxy: - async def aiter(self): for i in range(10): yield i @@ -665,7 +637,7 @@ async def corogen(self): multiplier = 2 while 1: try: - val = (yield) + val = yield yield val * multiplier except self.Double: multiplier *= 2 @@ -702,7 +674,7 @@ def corogen(self): multiplier = 2 while 1: try: - val = (yield) + val = yield yield val * multiplier except 
self.Double: multiplier *= 2 @@ -735,9 +707,7 @@ def test_await(self): def test_Proxy_from_source(): - class AbstractSource(abc.ABC): - @abc.abstractmethod def add(self, arg): ... @@ -747,7 +717,6 @@ def mul(self, arg): ... class ConcreteSource(AbstractSource): - def __init__(self, value): self.value = value @@ -771,11 +740,9 @@ class ProxySource(Proxy[AbstractSource]): on_final_mock.assert_called_once_with() -@pytest.mark.skipif(sys.version_info < (3, 7), reason='Requires Python 3.7') +@pytest.mark.skipif(sys.version_info < (3, 7), reason="Requires Python 3.7") def test_Proxy_from_source__py37_class_argument(): - class AbstractSource(abc.ABC): - @abc.abstractmethod def add(self, arg): ... @@ -785,7 +752,6 @@ def mul(self, arg): ... class ConcreteSource(AbstractSource): - def __init__(self, value): self.value = value @@ -810,17 +776,16 @@ class ProxySource(Proxy[AbstractSource], source=AbstractSource): def test_Proxy_from_source__no_ABCMeta(): - class Source: ... with pytest.raises(TypeError): + class ProxySource(Proxy[Source]): __proxy_source__ = Source def test_Proxy_from_source__no_abstractmethods(): - class Source(abc.ABC): ... 
diff --git a/t/unit/test_services.py b/t/unit/test_services.py index 731088a2..ce23ee9d 100644 --- a/t/unit/test_services.py +++ b/t/unit/test_services.py @@ -1,5 +1,8 @@ import asyncio from typing import ContextManager + +import pytest + from mode import Service from mode.services import Diag, ServiceTask, WaitResult from mode.utils.logging import get_logger @@ -13,11 +16,9 @@ patch, ) from mode.utils.typing import AsyncContextManager -import pytest class S(Service): - def __post_init__(self): self.on_started_log = Mock() self.on_stopped_log = Mock() @@ -34,22 +35,20 @@ async def on_shutdown(self): class test_Diag: - @pytest.fixture() def diag(self): service = Mock() return Diag(service) def test_set_unset_flag(self, *, diag): - diag.set_flag('FOO') - assert 'FOO' in diag.flags - assert diag.last_transition['FOO'] - diag.unset_flag('FOO') - assert 'FOO' not in diag.flags + diag.set_flag("FOO") + assert "FOO" in diag.flags + assert diag.last_transition["FOO"] + diag.unset_flag("FOO") + assert "FOO" not in diag.flags class test_ServiceTask: - @pytest.fixture() def task(self): fun = AsyncMock() @@ -69,15 +68,15 @@ def test_repr(self, *, task): @pytest.mark.asyncio async def test_start_stop(): s = S() - assert s.state == 'init' + assert s.state == "init" assert await s.maybe_start() assert not await s.maybe_start() - assert s.state == 'running' + assert s.state == "running" s.on_started_log.assert_called_with() await s.stop() s.on_stopped_log.assert_called_with() s.on_shutdown_log.assert_called_with() - assert s.state == 'stopping' + assert s.state == "stopping" def test_state_stopped(): @@ -85,7 +84,7 @@ def test_state_stopped(): s._started.set() s._stopped.set() s._shutdown.set() - assert s.state == 'shutdown' + assert s.state == "shutdown" def test_should_stop_returns_true_if_crashed(): @@ -119,12 +118,11 @@ def test_repr(): @pytest.mark.asyncio async def test_subclass_can_override_Service_task(): - class ATaskService(Service): values = [] def __post_init__(self): 
- self.event = asyncio.Event(loop=self.loop) + self.event = asyncio.Event() @Service.task async def _background_task(self): @@ -132,7 +130,6 @@ async def _background_task(self): self.event.set() class BTaskService(ATaskService): - @Service.task async def _background_task(self): self.values.append(2) @@ -145,7 +142,6 @@ async def _background_task(self): class test_Service: - @pytest.fixture() def service(self): return S() @@ -198,7 +194,6 @@ async def test_timer(self): m = Mock() class Foo(Service): - @Service.timer(1.0) async def foo(self): m() @@ -219,26 +214,45 @@ async def itertimer(*args, **kwargs): assert m.call_count == 4 @pytest.mark.asyncio - async def test_transitions_to(self, *, service): + async def test_crontab(self): + m = Mock() + + with patch("mode.services.secs_for_next") as secs_for_next: + secs_for_next.secs_for_next.return_value = 0.1 + + class Foo(Service): + @Service.crontab("* * * * *") + async def foo(self): + m() + self._stopped.set() - @service.transitions_to('FLAG') + foo = Foo() + foo.sleep = AsyncMock() + async with foo: + await asyncio.sleep(0) + + m.assert_called_once_with() + + @pytest.mark.asyncio + async def test_transitions_to(self, *, service): + @service.transitions_to("FLAG") async def foo(self, arg, kw=1): - assert 'FLAG' in service.diag.flags + assert "FLAG" in service.diag.flags assert arg == 1 assert kw == 2 await foo(service, 1, kw=2) - assert 'FLAG' not in service.diag.flags + assert "FLAG" not in service.diag.flags @pytest.mark.asyncio async def test_transition_with(self, *, service): called = Mock() async def outer(): - assert 'FLAG' in service.diag.flags + assert "FLAG" in service.diag.flags called() - await service.transition_with('FLAG', outer()) + await service.transition_with("FLAG", outer()) called.assert_called_once_with() def test_add_dependency__no_beacon(self, *, service): @@ -281,9 +295,7 @@ async def test_remove_dependency__no_beacon(self, *, service): @pytest.mark.asyncio async def 
test_add_async_context__non_async(self, *, service): - class Cx(ContextManager): - def __exit__(self, *args): return None @@ -298,9 +310,7 @@ async def test_add_async_context__not_context(self, *, service): await service.add_async_context(object()) def test_add_context__is_async(self, *, service): - class Cx(AsyncContextManager): - async def __aexit__(self, *args): return None @@ -318,7 +328,7 @@ async def test__wait_stopped(self, *, service): service._stopped = Mock() service._crashed = Mock() - with patch('asyncio.wait', AsyncMock()) as wait: + with patch("asyncio.wait", AsyncMock()) as wait: f1 = Mock() f2 = Mock() f3 = Mock() @@ -326,11 +336,12 @@ async def test__wait_stopped(self, *, service): await service._wait_stopped(timeout=1.0) wait.assert_called_once_with( - [service._stopped.wait.return_value, - service._crashed.wait.return_value], + [ + service._stopped.wait.return_value, + service._crashed.wait.return_value, + ], return_when=asyncio.FIRST_COMPLETED, timeout=1.0, - loop=service.loop, ) for fut in done: @@ -378,7 +389,7 @@ async def test__actually_start__rasies_exc(self, *, service): service, init_deps=[s1], ) - service.on_start.coro.side_effect = KeyError('foo') + service.on_start.coro.side_effect = KeyError("foo") service.restart_count = 1 with pytest.raises(KeyError): await service._actually_start() @@ -405,11 +416,9 @@ async def test__actually_start__child_is_None(self, *, service): service.restart_count = 1 await service._actually_start() - def _mock_for_start(self, service, - init_deps=None, - tasks=None, - children=None, - on_async_enter=None): + def _mock_for_start( + self, service, init_deps=None, tasks=None, children=None, on_async_enter=None + ): service.on_init_dependencies = Mock(return_value=init_deps or []) service.add_dependency = Mock() service.on_first_start = AsyncMock() @@ -444,7 +453,6 @@ async def test_join_services_raises(self, *, service): s2.stop.coro.assert_called_once_with() def test_init_subclass_logger(self, *, service): - 
class X(Service): logger = None @@ -467,19 +475,21 @@ def test_get_set_loop(self, *, service): def test__get_tasks__no_tasks(self, *, service): class X(type(service)): ... + X._tasks = [] assert list(X()._get_tasks()) == [] @pytest.mark.asyncio async def test__execute_task__loop_is_closed(self, *, service): async def raises(): - raise RuntimeError('Event loop is closed because blah') + raise RuntimeError("Event loop is closed because blah") + await service._execute_task(raises()) @pytest.mark.asyncio async def test__execute_task__exception(self, *, service): service.crash = AsyncMock() - exc = KeyError('foo bah bar') + exc = KeyError("foo bah bar") async def raises(): raise exc @@ -490,7 +500,7 @@ async def raises(): @pytest.mark.asyncio async def test__execute_task__RuntimeError(self, *, service): service.crash = AsyncMock() - exc = RuntimeError('foo bah bar') + exc = RuntimeError("foo bah bar") async def raises(): raise exc @@ -500,9 +510,9 @@ async def raises(): @pytest.mark.asyncio async def test__execute_task__CancelledError(self, *, service): - async def raises(): raise asyncio.CancelledError() + await service._execute_task(raises()) @pytest.mark.asyncio @@ -511,6 +521,7 @@ async def test__execute_task__CancelledError_stopped(self, *, service): async def raises(): raise asyncio.CancelledError() + await service._execute_task(raises()) @pytest.mark.asyncio @@ -529,7 +540,7 @@ async def test_wait__one(self, *, service): @pytest.mark.asyncio async def test_wait_many(self, *, service): - with patch('asyncio.wait', AsyncMock()) as wait: + with patch("asyncio.wait", AsyncMock()) as wait: service._wait_one = AsyncMock() m1 = AsyncMock() m2 = AsyncMock() @@ -540,20 +551,18 @@ async def test_wait_many(self, *, service): [m1, m2], return_when=asyncio.ALL_COMPLETED, timeout=3.34, - loop=service.loop, ) - service._wait_one.assert_called_once_with( - ANY, timeout=3.34) + service._wait_one.assert_called_once_with(ANY, timeout=3.34) @pytest.mark.asyncio async def 
test_wait_first__propagates_exceptions(self, *, service): - exc = KeyError('foo') + exc = KeyError("foo") m1 = Mock() m1.done.return_value = True m1.exception.return_value = exc m1.result.side_effect = exc - with patch('asyncio.wait', AsyncMock()) as wait: + with patch("asyncio.wait", AsyncMock()) as wait: wait.coro.return_value = ((m1,), ()) with pytest.raises(KeyError): await service.wait_first(asyncio.sleep(5), timeout=5) @@ -566,11 +575,11 @@ async def test_wait_first__propagates_CancelledError(self, *, service): sleep = asyncio.sleep(5) to_cancel = [sleep] try: - with patch('asyncio.ensure_future') as ensure_future: + with patch("asyncio.ensure_future") as ensure_future: fut = ensure_future.return_value fut.done.return_value = False fut.cancelled.return_value = True - with patch('asyncio.wait', AsyncMock()) as wait: + with patch("asyncio.wait", AsyncMock()) as wait: wait.coro.return_value = ((m1,), ()) with pytest.raises(asyncio.CancelledError): await service.wait_first(sleep, timeout=5) @@ -627,7 +636,7 @@ async def test_crash__recursive_loop(self, *, service): service._crashed.clear() service.log.warning = Mock() service.supervisor = None - obj = Mock(data='foo', children=[Mock(), Mock(), Mock()]) + obj = Mock(data="foo", children=[Mock(), Mock(), Mock()]) service.beacon = Mock( depth=3, walk=Mock(return_value=[obj, obj]), @@ -640,7 +649,7 @@ async def test_crash__recursive_loop__no_root(self, *, service): service._crashed.clear() service.log.warning = Mock() service.supervisor = None - obj = Mock(data='foo', children=[Mock(), Mock(), Mock()]) + obj = Mock(data="foo", children=[Mock(), Mock(), Mock()]) service.beacon = Mock( depth=3, walk=Mock(return_value=[obj, obj]), @@ -658,6 +667,7 @@ def on_wait_for_futures(**kwargs): if service._maybe_wait_for_futures.call_count >= 3: service._futures.clear() raise asyncio.CancelledError() + service._maybe_wait_for_futures.coro.side_effect = on_wait_for_futures await service._gather_futures() @@ -667,8 +677,8 @@ def 
on_wait_for_futures(**kwargs): @pytest.mark.asyncio async def test__maybe_wait_for_futures__ValueError_left(self, *, service): service._futures = [Mock()] - with patch('asyncio.shield', AsyncMock()) as shield: - with patch('asyncio.wait', AsyncMock()): + with patch("asyncio.shield", AsyncMock()) as shield: + with patch("asyncio.wait", AsyncMock()): async def on_shield(fut, *args, **kwargs): # if we don't wait for coroutine passed to shield @@ -683,8 +693,8 @@ async def on_shield(fut, *args, **kwargs): @pytest.mark.asyncio async def test__maybe_wait_for_futures__ValueError_empty(self, *, service): service._futures = [Mock()] - with patch('asyncio.shield', AsyncMock()) as shield: - with patch('asyncio.wait', AsyncMock()): + with patch("asyncio.shield", AsyncMock()) as shield: + with patch("asyncio.wait", AsyncMock()): async def on_shield(fut, *args, **kwargs): # if we don't wait for coroutine passed to shield @@ -699,8 +709,8 @@ async def on_shield(fut, *args, **kwargs): @pytest.mark.asyncio async def test__maybe_wait_for_futures__CancelledError(self, *, service): service._futures = [Mock()] - with patch('asyncio.shield', AsyncMock()) as shield: - with patch('asyncio.wait', AsyncMock()): + with patch("asyncio.shield", AsyncMock()) as shield: + with patch("asyncio.wait", AsyncMock()): async def on_shield(fut, *args, **kwargs): # if we don't wait for coroutine passed to shield @@ -713,7 +723,7 @@ async def on_shield(fut, *args, **kwargs): @pytest.mark.asyncio async def test_itertimer(self, *, service): - with patch('mode.services.Timer') as itertimer: + with patch("mode.services.Timer") as itertimer: async def on_itertimer(*args, **kwargs): yield 1.0 @@ -728,7 +738,7 @@ async def on_itertimer(*args, **kwargs): @pytest.mark.asyncio async def test_itertimer__first_stop(self, *, service): - with patch('mode.services.Timer') as itertimer: + with patch("mode.services.Timer") as itertimer: async def on_itertimer(*args, **kwargs): service._stopped.set() @@ -740,7 +750,7 @@ 
async def on_itertimer(*args, **kwargs): @pytest.mark.asyncio async def test_itertimer__second_stop(self, *, service): - with patch('mode.services.Timer') as itertimer: + with patch("mode.services.Timer") as itertimer: async def on_itertimer(*args, **kwargs): for val in [0.784512, 0.2, 0.3]: @@ -749,7 +759,7 @@ async def on_itertimer(*args, **kwargs): itertimer.side_effect = on_itertimer - service.sleep = AsyncMock(name='sleep') + service.sleep = AsyncMock(name="sleep") async def on_sleep(*args, **kwargs): service._stopped.set() @@ -763,7 +773,7 @@ async def on_sleep(*args, **kwargs): @pytest.mark.asyncio async def test_itertimer__third_stop(self, *, service): - with patch('mode.services.Timer') as itertimer: + with patch("mode.services.Timer") as itertimer: async def on_itertimer(*args, **kwargs): yield 0.1341 @@ -773,7 +783,7 @@ async def on_itertimer(*args, **kwargs): itertimer.side_effect = on_itertimer - sleep = AsyncMock(name='sleep') + sleep = AsyncMock(name="sleep") values = [] async for value in service.itertimer(1.0, sleep=sleep): diff --git a/t/unit/test_supervisors.py b/t/unit/test_supervisors.py index 995c45e1..f4f5198a 100644 --- a/t/unit/test_supervisors.py +++ b/t/unit/test_supervisors.py @@ -1,5 +1,7 @@ import asyncio + import pytest + from mode.exceptions import MaxRestartsExceeded from mode.supervisors import ( CrashingSupervisor, @@ -12,7 +14,6 @@ class test_SupervisorStrategy: - @pytest.fixture() def service(self): return Mock(stop=AsyncMock()) @@ -27,13 +28,13 @@ def test_discard(self, *, sup, service): assert service not in sup._services def test_insert(self, *, sup, service): - s1 = Mock(name='s1') - s2 = Mock(name='s2') - s3 = Mock(name='s3') - s4 = Mock(name='s4') + s1 = Mock(name="s1") + s2 = Mock(name="s2") + s3 = Mock(name="s3") + s4 = Mock(name="s4") sup._services = [s1, s2, s3, s4] - s5 = Mock(name='s5') + s5 = Mock(name="s5") sup.insert(2, s5) assert sup._services == [s1, s2, s5, s4] @@ -48,11 +49,13 @@ async def 
test_run_until_complete(self, *, sup): @pytest.mark.asyncio async def test__supervisor__second_stop(self, *, sup, service): service.started = False - with patch('asyncio.wait_for', AsyncMock()) as wait_for: + with patch("asyncio.wait_for", AsyncMock()) as wait_for: + async def on_wait_for(*args, **kwargs): if wait_for.call_count >= 2: sup._stopped.set() raise asyncio.TimeoutError() + wait_for.coro.side_effect = on_wait_for sup.start_services = AsyncMock() @@ -67,7 +70,8 @@ async def test_on_stop(self, *, sup): sup._services = [ Mock(stop=AsyncMock()), Mock(stop=AsyncMock()), - Mock(stop=AsyncMock())] + Mock(stop=AsyncMock()), + ] sup._services[0].started = False await sup.on_stop() for s in sup._services[1:]: @@ -82,7 +86,7 @@ async def test_on_stop__raises_MemoryError(self, *, sup, service): @pytest.mark.asyncio async def test_on_stop__raises_exc(self, *, sup, service): sup.log.exception = Mock() - service.stop.coro.side_effect = KeyError('foo') + service.stop.coro.side_effect = KeyError("foo") await sup.on_stop() sup.log.exception.assert_called_once() @@ -107,14 +111,14 @@ async def test_restart_service__max_restarts(self, *, sup, service): @pytest.mark.asyncio async def test_restart_service__replacement(self, *, sup): sup._bucket = AsyncContextManagerMock() - s1 = Mock(name='s1') - s2 = Mock(name='s2') - s3 = Mock(name='s3') + s1 = Mock(name="s1") + s2 = Mock(name="s2") + s3 = Mock(name="s3") sup._services = [s1, s2, s3] sup._index = {s: i for i, s in enumerate(sup._services)} - s4 = Mock(name='s4') + s4 = Mock(name="s4") s4.supervisor = None sup.replacement = AsyncMock() sup.replacement.coro.return_value = s4 @@ -129,7 +133,6 @@ async def test_restart_service__replacement(self, *, sup): class test_OneForAllSupervisor: - @pytest.fixture() def sup(self): return OneForAllSupervisor(Mock()) @@ -140,7 +143,6 @@ async def test_restart_services__empty(self, *, sup): class test_ForfeitOneForOneSupervisor: - @pytest.fixture() def sup(self): return 
ForfeitOneForOneSupervisor(Mock()) @@ -151,7 +153,6 @@ async def test_restart_services__empty(self, *, sup): class test_ForfeitOneForAllSupervisor: - @pytest.fixture() def sup(self): return ForfeitOneForAllSupervisor(Mock()) @@ -162,7 +163,6 @@ async def test_restart_services__empty(self, *, sup): class test_CrashingSupervisor: - @pytest.fixture() def sup(self): return CrashingSupervisor(Mock()) diff --git a/t/unit/test_threads.py b/t/unit/test_threads.py index 7826b6d4..dccc1f72 100644 --- a/t/unit/test_threads.py +++ b/t/unit/test_threads.py @@ -1,22 +1,18 @@ import asyncio import threading + import pytest -from mode.threads import ( - MethodQueue, - QueueServiceThread, - ServiceThread, - WorkerThread, -) + +from mode.threads import MethodQueue, QueueServiceThread, ServiceThread, WorkerThread from mode.utils.futures import done_future from mode.utils.locks import Event from mode.utils.mocks import ANY, AsyncMock, Mock, patch class test_WorkerThread: - @pytest.fixture() def service(self): - return Mock(name='service') + return Mock(name="service") @pytest.fixture() def thread(self, *, service): @@ -47,25 +43,21 @@ def test_stop__not_alive(self, *, thread): class test_ServiceThread: - @pytest.fixture def loop(self, *, event_loop): return event_loop @pytest.fixture def thread_loop(self): - return Mock(name='thread_loop') + return Mock(name="thread_loop") @pytest.fixture def Worker(self): - return Mock(name='Worker') + return Mock(name="Worker") @pytest.fixture def thread(self, *, Worker, loop, thread_loop): - return ServiceThread( - Worker=Worker, - loop=loop, - thread_loop=thread_loop) + return ServiceThread(Worker=Worker, loop=loop, thread_loop=thread_loop) @pytest.mark.asyncio async def test_on_thread_stop(self, *, thread): @@ -87,7 +79,7 @@ def test_new_shutdown_event(self, *, thread): @pytest.mark.asyncio async def test_maybe_start(self, *, thread): - thread.start = AsyncMock(name='start') + thread.start = AsyncMock(name="start") thread._thread_started.set() 
await thread.maybe_start() thread.start.assert_not_called() @@ -98,7 +90,7 @@ async def test_maybe_start(self, *, thread): @pytest.mark.asyncio async def test_start(self, *, event_loop, thread): - thread.add_future = AsyncMock(name='thread.add_future') + thread.add_future = AsyncMock(name="thread.add_future") thread._thread_running = None assert thread.parent_loop == event_loop asyncio.ensure_future(self._wait_for_event(thread)) @@ -111,7 +103,7 @@ async def test_start(self, *, event_loop, thread): @pytest.mark.asyncio async def test_start__no_wait(self, *, event_loop, thread): - thread.add_future = AsyncMock(name='thread.add_future') + thread.add_future = AsyncMock(name="thread.add_future") thread.wait_for_thread = False thread._thread_running = None assert thread.parent_loop == event_loop @@ -136,19 +128,18 @@ async def test_start__already_started_raises(self, *, thread): await thread.start() def test_start_thread(self, *, thread): - thread._serve = Mock(name='thread._serve') - with patch('asyncio.set_event_loop') as set_event_loop: + thread._serve = Mock(name="thread._serve") + with patch("asyncio.set_event_loop") as set_event_loop: thread._start_thread() set_event_loop.assert_called_once_with(thread.loop) thread._serve.assert_called_once_with() - thread.loop.run_until_complete.assert_called_once_with( - thread._serve()) + thread.loop.run_until_complete.assert_called_once_with(thread._serve()) def test_start_thread__raises(self, *, thread): - thread._serve = Mock(name='thread._serve') - thread._serve.side_effect = KeyError('foo') + thread._serve = Mock(name="thread._serve") + thread._serve.side_effect = KeyError("foo") assert not thread._shutdown.is_set() - with patch('asyncio.set_event_loop') as set_event_loop: + with patch("asyncio.set_event_loop") as set_event_loop: with pytest.raises(KeyError): thread._start_thread() set_event_loop.assert_called_once_with(thread.loop) @@ -173,9 +164,9 @@ async def test_stop_futures(self, *, thread): @pytest.mark.asyncio 
async def test_shutdown_thread(self, *, thread): - thread._default_stop_children = AsyncMock(name='stop_children') - thread.on_thread_stop = AsyncMock(name='on_thread_stop') - thread._default_stop_futures = AsyncMock(name='stop_futures') + thread._default_stop_children = AsyncMock(name="stop_children") + thread.on_thread_stop = AsyncMock(name="on_thread_stop") + thread._default_stop_futures = AsyncMock(name="stop_futures") await thread._shutdown_thread() thread._default_stop_children.assert_called_once_with() @@ -189,7 +180,6 @@ async def test_shutdown_thread(self, *, thread): @pytest.mark.asyncio async def test__thread_keepalive(self, *, thread): - async def timer(interval, **kwargs): for _ in range(10): yield interval @@ -241,17 +231,17 @@ async def test_serve__Exception_no_beacon(self, *, thread): thread._shutdown_thread.assert_called_once_with() def mock_for_serve(self, thread): - thread._default_start = AsyncMock(name='start') - thread.wait_until_stopped = AsyncMock(name='wait_until_stopped') - thread.crash = AsyncMock(name='crash') - thread.on_crash = Mock(name='on_crash') - thread.beacon = Mock(name='beacon') - thread.beacon.root.data.crash = AsyncMock(name='root.crash') - thread._shutdown_thread = AsyncMock(name='shutdown_thread') + thread._default_start = AsyncMock(name="start") + thread.wait_until_stopped = AsyncMock(name="wait_until_stopped") + thread.crash = AsyncMock(name="crash") + thread.on_crash = Mock(name="on_crash") + thread.beacon = Mock(name="beacon") + thread.beacon.root.data.crash = AsyncMock(name="root.crash") + thread._shutdown_thread = AsyncMock(name="shutdown_thread") def test_on_crash(self, *, thread): - with patch('traceback.print_exc') as print_exc: - thread.on_crash('foo {0!r}', 10) + with patch("traceback.print_exc") as print_exc: + thread.on_crash("foo {0!r}", 10) print_exc.assert_called_once_with(None, ANY) @pytest.mark.asyncio @@ -267,11 +257,10 @@ async def test_crash(self, *, thread): class test_MethodQueue: - 
@pytest.mark.asyncio async def test_call(self): loop = asyncio.get_event_loop() - queue = MethodQueue(num_workers=2, loop=loop) + queue = MethodQueue(num_workers=2) async with queue: @@ -290,8 +279,8 @@ async def myfun(x, y): @pytest.mark.asyncio async def test_call_raising(self): loop = asyncio.get_event_loop() - queue = MethodQueue(num_workers=2, loop=loop) - all_done = asyncio.Event(loop=loop) + queue = MethodQueue(num_workers=2) + all_done = asyncio.Event() calls = 0 async with queue: @@ -321,11 +310,10 @@ async def myfun(x, y): @pytest.mark.asyncio async def test_cast(self): - loop = asyncio.get_event_loop() - queue = MethodQueue(num_workers=2, loop=loop) + queue = MethodQueue(num_workers=2) calls = 0 - all_done = asyncio.Event(loop=loop) + all_done = asyncio.Event() async with queue: @@ -341,11 +329,10 @@ async def myfun(x, y): @pytest.mark.asyncio async def test_flush(self): - loop = asyncio.get_event_loop() - queue = MethodQueue(num_workers=2, loop=loop) + queue = MethodQueue(num_workers=2) calls = 0 - all_done = asyncio.Event(loop=loop) + all_done = asyncio.Event() async def myfun(x, y): nonlocal calls @@ -360,7 +347,6 @@ async def myfun(x, y): class test_QueueServiceThread: - @pytest.fixture() def s(self): return QueueServiceThread() @@ -387,25 +373,32 @@ async def test_call_thread(self, *, s): s._method_queue = Mock(call=AsyncMock()) async def on_call(*args, **kwargs): - return done_future('value') + return done_future("value") + s._method_queue.call.side_effect = on_call s.parent_loop = Mock() fun = Mock() - ret = await s.call_thread(fun, 'arg1', 'arg2', kw1=1, kw2=2) - assert ret == 'value' + ret = await s.call_thread(fun, "arg1", "arg2", kw1=1, kw2=2) + assert ret == "value" s.parent_loop.create_future.assert_called_once_with() s._method_queue.call.assert_called_once_with( s.parent_loop.create_future(), fun, - 'arg1', 'arg2', - kw1=1, kw2=2, + "arg1", + "arg2", + kw1=1, + kw2=2, ) @pytest.mark.asyncio async def test_cast_thread(self, *, s): fun = 
Mock() s._method_queue = Mock(cast=AsyncMock()) - await s.cast_thread(fun, 'arg1', 'arg2', kw1=1, kw2=2) + await s.cast_thread(fun, "arg1", "arg2", kw1=1, kw2=2) s._method_queue.cast.coro.assert_called_once_with( - fun, 'arg1', 'arg2', kw1=1, kw2=2, + fun, + "arg1", + "arg2", + kw1=1, + kw2=2, ) diff --git a/t/unit/test_worker.py b/t/unit/test_worker.py index 6c8fc457..4bee6b33 100644 --- a/t/unit/test_worker.py +++ b/t/unit/test_worker.py @@ -3,22 +3,17 @@ import sys from contextlib import contextmanager from signal import Signals + +import pytest + from mode import Service from mode.debug import BlockingDetector +from mode.utils.mocks import AsyncMock, Mock, call, mask_module, patch, patch_module from mode.worker import Worker, exiting -from mode.utils.mocks import ( - AsyncMock, - Mock, - call, - mask_module, - patch, - patch_module, -) -import pytest def test_exiting(): - with patch('builtins.print') as print: + with patch("builtins.print") as print: with pytest.raises(SystemExit) as excinfo: with exiting(print_exception=True): raise KeyError() @@ -27,14 +22,12 @@ def test_exiting(): class test_Worker: - @pytest.fixture() def worker(self): - return Worker(loglevel='INFO', logfile=None) + return Worker(loglevel="INFO", logfile=None) def setup_method(self, method): - self.setup_logging_patch = patch( - 'mode.utils.logging.setup_logging') + self.setup_logging_patch = patch("mode.utils.logging.setup_logging") self.setup_logging = self.setup_logging_patch.start() def teardown_method(self): @@ -55,22 +48,22 @@ def test_constructor_aligns_beacons(self): def test_say__quiet(self, worker): worker.quiet = True - with patch('builtins.print') as print: - worker.say('msg') + with patch("builtins.print") as print: + worker.say("msg") print.assert_not_called() def test__say(self, worker): worker.quiet = False file = Mock() - with patch('builtins.print') as print: - worker._say('msg', file=file, foo=1) - print.assert_called_once_with('msg', file=file, foo=1, end='\n') + with 
patch("builtins.print") as print: + worker._say("msg", file=file, foo=1) + print.assert_called_once_with("msg", file=file, foo=1, end="\n") def test__say__default_file(self, worker): worker.quiet = False - with patch('builtins.print') as print: - worker._say('msg', file=None, end='.') - print.assert_called_once_with('msg', file=worker.stdout, end='.') + with patch("builtins.print") as print: + worker._say("msg", file=None, end=".") + print.assert_called_once_with("msg", file=worker.stdout, end=".") def test_on_init_dependencies(self, worker): workers = [Mock(), Mock(), Mock()] @@ -110,32 +103,35 @@ async def test_on_first_start__override_logging(self): async def test_on_execute(self, worker): await worker.on_execute() - @pytest.mark.parametrize('loghandlers', [ - [], - [Mock(), Mock()], - [Mock()], - None, - ]) + @pytest.mark.parametrize( + "loghandlers", + [ + [], + [Mock(), Mock()], + [Mock()], + None, + ], + ) def test_setup_logging(self, loghandlers): worker_inst = Worker( loglevel=5, - logfile='TEMP', + logfile="TEMP", logging_config=None, loghandlers=loghandlers, ) worker_inst._setup_logging() self.setup_logging.assert_called_once_with( loglevel=5, - logfile='TEMP', + logfile="TEMP", logging_config=None, loghandlers=loghandlers or [], ) def test_setup_logging_raises_exception(self, worker): - with patch('sys.stderr'): - with patch('traceback.print_stack') as print_stack: - with patch('mode.utils.logging.setup_logging') as sl: - sl.side_effect = KeyError('foo') + with patch("sys.stderr"): + with patch("traceback.print_stack") as print_stack: + with patch("mode.utils.logging.setup_logging") as sl: + sl.side_effect = KeyError("foo") with pytest.raises(KeyError): worker._setup_logging() @@ -145,7 +141,7 @@ def test_setup_logging_raises_exception(self, worker): def test_setup_logging__no_redirect(self, worker): worker.redirect_stdouts = False - with patch('mode.utils.logging.setup_logging'): + with patch("mode.utils.logging.setup_logging"): worker._setup_logging() 
def test_stop_and_shutdown(self, worker): @@ -162,8 +158,7 @@ def test_stop_and_shutdown(self, worker): worker._signal_stop_future.done.return_value = False worker.stop_and_shutdown() - worker.loop.run_until_complete.assert_called_with( - worker._signal_stop_future) + worker.loop.run_until_complete.assert_called_with(worker._signal_stop_future) @pytest.mark.asyncio async def test_maybe_start_blockdetection(self, worker): @@ -181,26 +176,27 @@ def test_instal_signal_handlers(self, worker): worker._install_signal_handlers_unix = Mock() worker.install_signal_handlers() - if sys.platform == 'win32': + if sys.platform == "win32": worker._install_signal_handlers_windows.assert_called_once_with() else: worker._install_signal_handlers_unix.assert_called_once_with() def test__install_signal_handlers_windows(self, worker): - with patch('signal.signal') as sig: + with patch("signal.signal") as sig: worker._install_signal_handlers_windows() - sig.assert_called_once_with( - signal.SIGTERM, worker._on_win_sigterm) + sig.assert_called_once_with(signal.SIGTERM, worker._on_win_sigterm) - @pytest.mark.skipif(sys.platform == 'win32', reason='win32: no SIGUSR1') + @pytest.mark.skipif(sys.platform == "win32", reason="win32: no SIGUSR1") def test__install_signal_handlers_unix(self, worker): worker.loop = Mock() worker._install_signal_handlers_unix() - worker.loop.add_signal_handler.assert_has_calls([ - call(signal.SIGINT, worker._on_sigint), - call(signal.SIGTERM, worker._on_sigterm), - call(signal.SIGUSR1, worker._on_sigusr1), - ]) + worker.loop.add_signal_handler.assert_has_calls( + [ + call(signal.SIGINT, worker._on_sigint), + call(signal.SIGTERM, worker._on_sigterm), + call(signal.SIGUSR1, worker._on_sigusr1), + ] + ) def test__on_sigint(self, worker): worker._schedule_shutdown = Mock() @@ -225,12 +221,12 @@ def test__on_sigusr1(self, worker): @pytest.mark.asyncio async def test__cry(self, worker): - with patch('mode.utils.logging.cry') as cry: + with patch("mode.utils.logging.cry") 
as cry: await worker._cry() cry.assert_called_once_with(file=worker.stderr) def test__schedule_shutdown(self, worker): - with patch('asyncio.ensure_future') as ensure_future: + with patch("asyncio.ensure_future") as ensure_future: worker._stop_on_signal = Mock() worker._schedule_shutdown(Signals.SIGTERM) assert worker._signal_stop_time @@ -254,7 +250,9 @@ def test_execute_from_commandline(self, worker): worker.execute_from_commandline() assert excinfo.value.code == 0 assert worker._starting_fut is ensure_future.return_value - ensure_future.assert_called_once_with(worker.start.return_value) + ensure_future.assert_called_once_with( + worker.start.return_value, loop=worker.loop + ) worker.stop_and_shutdown.assert_called_once_with() def test_execute_from_commandline__MemoryError(self, worker): @@ -273,7 +271,7 @@ def test_execute_from_commandline__CancelledError(self, worker): def test_execute_from_commandline__Exception(self, worker): with self.patch_execute(worker): - worker.start.side_effect = KeyError('foo') + worker.start.side_effect = KeyError("foo") with pytest.raises(SystemExit) as excinfo: worker.execute_from_commandline() assert excinfo.value.code > 0 @@ -283,7 +281,7 @@ def patch_execute(self, worker): worker.loop = Mock() worker.start = Mock() worker.stop_and_shutdown = Mock() - with patch('asyncio.ensure_future') as ensure_future: + with patch("asyncio.ensure_future") as ensure_future: yield ensure_future def test_on_worker_shutdown(self, worker): @@ -298,8 +296,7 @@ def test_stop_and_shutdown__stopping_worker(self, worker): worker.loop = Mock() worker.stop_and_shutdown() - worker.loop.run_until_complete.assert_called_with( - worker.stop.return_value) + worker.loop.run_until_complete.assert_called_with(worker.stop.return_value) def test__shutdown_loop(self, worker): with self.patch_shutdown_loop(worker, is_running=False): @@ -312,6 +309,7 @@ def on_loop_stop(): if worker.loop.stop.call_count >= 3: worker.loop.is_running.return_value = False return 
worker.loop.stop.return_value + worker.loop.stop.side_effect = on_loop_stop worker._shutdown_loop() @@ -324,10 +322,11 @@ def test__shutdown_loop__wait_raises(self, worker): def on_loop_stop(): if worker.loop.stop.call_count >= 3: - print('MOO') + print("MOO") worker.loop.stop.side_effect = None - raise ValueError('moo') + raise ValueError("moo") return worker.loop.stop.return_value + worker.loop.stop.side_effect = on_loop_stop worker._shutdown_loop() @@ -336,7 +335,7 @@ def on_loop_stop(): worker.log.exception.assert_called_once() def test__shutdown_loop__service_crashed(self, worker): - worker.crash_reason = KeyError('foo') + worker.crash_reason = KeyError("foo") with self.patch_shutdown_loop(worker, is_running=False): with pytest.raises(KeyError): worker._shutdown_loop() @@ -348,19 +347,19 @@ def patch_shutdown_loop(self, worker, is_running=False): worker._gather_all = Mock() worker.loop.is_running.return_value = is_running worker._sentinel_task = Mock() - with patch('asyncio.ensure_future') as ensure_future: - with patch('asyncio.sleep'): + with patch("asyncio.ensure_future") as ensure_future: + with patch("asyncio.sleep"): yield ensure_future @pytest.mark.asyncio async def test__sentinel_task(self, worker): - with patch('asyncio.sleep', AsyncMock()) as sleep: + with patch("asyncio.sleep", AsyncMock()) as sleep: await worker._sentinel_task() - sleep.coro.assert_called_once_with(1.0, loop=worker.loop) + sleep.coro.assert_called_once_with(1.0) def test__gather_all(self, worker): - with patch('mode.worker.all_tasks') as all_tasks: - with patch('asyncio.sleep'): + with patch("mode.worker.all_tasks") as all_tasks: + with patch("asyncio.sleep"): all_tasks.return_value = [Mock(), Mock(), Mock()] worker.loop = Mock() @@ -370,8 +369,8 @@ def test__gather_all(self, worker): task.cancel.assert_called_once_with() def test__gather_all_early(self, worker): - with patch('mode.worker.all_tasks') as all_tasks: - with patch('asyncio.sleep'): + with patch("mode.worker.all_tasks") 
as all_tasks: + with patch("asyncio.sleep"): worker.loop = Mock() def on_all_tasks(loop): @@ -399,12 +398,14 @@ async def test_on_started(self, worker): @pytest.mark.asyncio async def test__add_monitor(self, worker): worker.add_context = Mock() - with patch_module('aiomonitor'): + with patch_module("aiomonitor"): import aiomonitor + await worker._add_monitor() worker.add_context.assert_called_once_with( - aiomonitor.start_monitor.return_value) + aiomonitor.start_monitor.return_value + ) aiomonitor.start_monitor.assert_called_once_with( port=worker.console_port, @@ -414,7 +415,7 @@ async def test__add_monitor(self, worker): @pytest.mark.asyncio async def test__add_monitor__no_aiomonitor(self, worker): worker.log.warning = Mock() - with mask_module('aiomonitor'): + with mask_module("aiomonitor"): await worker._add_monitor() worker.log.warning.assert_called_once() diff --git a/t/unit/utils/test_cron.py b/t/unit/utils/test_cron.py new file mode 100644 index 00000000..3d62b2da --- /dev/null +++ b/t/unit/utils/test_cron.py @@ -0,0 +1,31 @@ +import pytz +from freezegun import freeze_time + +from mode.utils.cron import secs_for_next + +SECS_IN_HOUR = 60 * 60 + + +@freeze_time("2000-01-01 00:00:00") +def test_secs_for_next(): + every_minute_cron_format = "*/1 * * * *" + assert secs_for_next(every_minute_cron_format) == 60 + + every_8pm_cron_format = "0 20 * * *" + assert secs_for_next(every_8pm_cron_format) == 20 * SECS_IN_HOUR + + every_4th_july_1pm_cron_format = "0 13 4 7 *" + days_until_4th_july = 31 + 28 + 31 + 30 + 31 + 30 + 4 + secs_until_4th_july = SECS_IN_HOUR * 24 * days_until_4th_july + secs_until_1_pm = 13 * SECS_IN_HOUR + total_secs = secs_until_4th_july + secs_until_1_pm + assert secs_for_next(every_4th_july_1pm_cron_format) == total_secs + + +@freeze_time("2000-01-01 00:00:00") +def test_secs_for_next_with_tz(): + pacific = pytz.timezone("US/Pacific") + + every_8pm_cron_format = "0 20 * * *" + # In Pacific time it's 16:00 so only 4 hours until 8:00pm + assert 
secs_for_next(every_8pm_cron_format, tz=pacific) == 4 * SECS_IN_HOUR diff --git a/t/unit/utils/test_imports.py b/t/unit/utils/test_imports.py index 5464b8fb..05cb51cb 100644 --- a/t/unit/utils/test_imports.py +++ b/t/unit/utils/test_imports.py @@ -1,7 +1,9 @@ import os import sys -import pytest from contextlib import contextmanager + +import pytest + from mode.utils.imports import ( EntrypointExtension, FactoryMapping, @@ -18,64 +20,65 @@ class test_FactoryMapping: - @pytest.fixture() def map(self): - return FactoryMapping({ - 'redis': 'my.drivers.RedisDriver', - 'rabbitmq': 'my.drivers.RabbitDriver', - }) + return FactoryMapping( + { + "redis": "my.drivers.RedisDriver", + "rabbitmq": "my.drivers.RabbitDriver", + } + ) def test_constructor(self, *, map): - assert map.aliases['redis'] - assert map.aliases['rabbitmq'] + assert map.aliases["redis"] + assert map.aliases["rabbitmq"] def test_iterate(self, *, map): - map.by_name = Mock(name='by_name') + map.by_name = Mock(name="by_name") map._maybe_finalize = Mock() classes = list(map.iterate()) map._maybe_finalize.assert_called_once_with() - map.by_name.assert_has_calls([call('redis'), call('rabbitmq')]) + map.by_name.assert_has_calls([call("redis"), call("rabbitmq")]) assert classes == [map.by_name(), map.by_name()] def test_by_url(self, *, map): - map.by_name = Mock(name='by_name') - cls = map.by_url('redis://localhost:6379//1') + map.by_name = Mock(name="by_name") + cls = map.by_url("redis://localhost:6379//1") assert cls is map.by_name.return_value - map.by_name.assert_called_once_with('redis') + map.by_name.assert_called_once_with("redis") def test_by_name(self, *, map): map._maybe_finalize = Mock() - with patch('mode.utils.imports.symbol_by_name') as sbn: - cls = map.by_name('redis') + with patch("mode.utils.imports.symbol_by_name") as sbn: + cls = map.by_name("redis") assert cls is sbn.return_value - sbn.assert_called_once_with('redis', aliases=map.aliases) + sbn.assert_called_once_with("redis", 
aliases=map.aliases) map._maybe_finalize.assert_called_once_with() def test_by_name__ModuleNotFound(self, *, map): map._maybe_finalize = Mock() - with patch('mode.utils.imports.symbol_by_name') as sbn: + with patch("mode.utils.imports.symbol_by_name") as sbn: sbn.side_effect = ModuleNotFoundError() with pytest.raises(ModuleNotFoundError): - map.by_name('redis') + map.by_name("redis") map._maybe_finalize.assert_called_once_with() def test_by_name__ModuleNotFound_dotname(self, *, map): map._maybe_finalize = Mock() - with patch('mode.utils.imports.symbol_by_name') as sbn: + with patch("mode.utils.imports.symbol_by_name") as sbn: sbn.side_effect = ModuleNotFoundError() with pytest.raises(ModuleNotFoundError): - map.by_name('redis.foo') + map.by_name("redis.foo") map._maybe_finalize.assert_called_once_with() def test_get_alias(self, *, map): map._maybe_finalize = Mock() - assert map.get_alias('redis') == 'my.drivers.RedisDriver' + assert map.get_alias("redis") == "my.drivers.RedisDriver" map._maybe_finalize.assert_called_once_with() def test_include_setuptools_namespace(self, *, map): - map.include_setuptools_namespace('foo.bar.baz') - assert 'foo.bar.baz' in map.namespaces + map.include_setuptools_namespace("foo.bar.baz") + assert "foo.bar.baz" in map.namespaces def test__maybe_finalize(self, *, map): map._finalize = Mock() @@ -87,11 +90,11 @@ def test__maybe_finalize(self, *, map): map._finalize.assert_called_once_with() def test__finalize(self, *, map): - map.namespaces = {'foo'} + map.namespaces = {"foo"} with patch_iter_entry_points(): map._finalize() - assert map.aliases['ep1'] == 'foo:a' - assert map.aliases['ep2'] == 'bar:c' + assert map.aliases["ep1"] == "foo:a" + assert map.aliases["ep2"] == "bar:c" def test_data(self, *, map): assert map.data is map.aliases @@ -99,22 +102,22 @@ def test_data(self, *, map): def test__ensure_identifier(): with pytest.raises(ValueError): - _ensure_identifier('foo.bar.{baz}', 'full') + _ensure_identifier("foo.bar.{baz}", "full") 
class test_symbol_by_name: - @pytest.fixture() def imp(self): - return Mock(name='imp') + return Mock(name="imp") def test_missing_module(self): with pytest.raises(ValueError): - symbol_by_name(':foo') + symbol_by_name(":foo") def test_missing_module_but_valid_package(self): from mode.utils import logging - assert symbol_by_name('.logging', package='mode.utils') is logging + + assert symbol_by_name(".logging", package="mode.utils") is logging def test_already_object(self): obj = object() @@ -123,120 +126,121 @@ def test_already_object(self): def test_when_ValueError(self, *, imp): imp.side_effect = ValueError with pytest.raises(ValueError): - symbol_by_name('foo.bar:Baz', imp=imp) + symbol_by_name("foo.bar:Baz", imp=imp) - @pytest.mark.parametrize('exc', [AttributeError, ImportError]) + @pytest.mark.parametrize("exc", [AttributeError, ImportError]) def test_when_ImportError(self, exc, *, imp): imp.side_effect = exc() with pytest.raises(exc): - symbol_by_name('foo.bar:Baz', imp=imp, default=None) + symbol_by_name("foo.bar:Baz", imp=imp, default=None) - @pytest.mark.parametrize('exc', [AttributeError, ImportError]) + @pytest.mark.parametrize("exc", [AttributeError, ImportError]) def test_when_ImportError__with_default(self, exc, *, imp): imp.side_effect = exc() - assert symbol_by_name('foo.bar:Baz', imp=imp, default='f') == 'f' + assert symbol_by_name("foo.bar:Baz", imp=imp, default="f") == "f" def test_module(self): - assert symbol_by_name('os') is os + assert symbol_by_name("os") is os def test_symbol_by_name__module_attr(self): - assert symbol_by_name('sys.version_info') is sys.version_info - assert symbol_by_name('sys:version_info') is sys.version_info + assert symbol_by_name("sys.version_info") is sys.version_info + assert symbol_by_name("sys:version_info") is sys.version_info def test_smart_import(): - assert smart_import('os') is os - assert smart_import('sys:version_info') is sys.version_info - assert smart_import('sys.version_info') + assert 
smart_import("os") is os + assert smart_import("sys:version_info") is sys.version_info + assert smart_import("sys.version_info") with pytest.raises(AttributeError): - smart_import('sys:foobarbazbazasdqwewqrewqfadf') + smart_import("sys:foobarbazbazasdqwewqrewqfadf") def test_load_extension_classes(): with patch_iter_entry_points(): - with patch('mode.utils.imports.symbol_by_name') as sbn: - assert list(load_extension_classes('foo')) == [ - EntrypointExtension('ep1', sbn.return_value), - EntrypointExtension('ep2', sbn.return_value), + with patch("mode.utils.imports.symbol_by_name") as sbn: + assert list(load_extension_classes("foo")) == [ + EntrypointExtension("ep1", sbn.return_value), + EntrypointExtension("ep2", sbn.return_value), ] - sbn.assert_has_calls([call('foo:a'), call('bar:c')]) + sbn.assert_has_calls([call("foo:a"), call("bar:c")]) def test_load_extension_classes_syntax_error(): with patch_iter_entry_points(): - with patch('mode.utils.imports.symbol_by_name') as sbn: + with patch("mode.utils.imports.symbol_by_name") as sbn: sbn.side_effect = SyntaxError() with pytest.warns(UserWarning): - assert list(load_extension_classes('foo')) == [] + assert list(load_extension_classes("foo")) == [] def test_load_extension_class_names(): with patch_iter_entry_points(): - assert list(load_extension_class_names('foo')) == [ + assert list(load_extension_class_names("foo")) == [ RawEntrypointExtension( - 'ep1', - 'foo:a', + "ep1", + "foo:a", ), RawEntrypointExtension( - 'ep2', - 'bar:c', + "ep2", + "bar:c", ), ] @contextmanager def patch_iter_entry_points(): - with patch('pkg_resources.iter_entry_points') as iter_entry_points: - ep1 = Mock(name='ep1') - ep1.name = 'ep1' - ep1.module_name = 'foo' - ep1.attrs = ['a', 'b'] - ep2 = Mock(name='ep2') - ep2.name = 'ep2' - ep2.module_name = 'bar' - ep2.attrs = ['c', 'd'] + with patch("pkg_resources.iter_entry_points") as iter_entry_points: + ep1 = Mock(name="ep1") + ep1.name = "ep1" + ep1.module_name = "foo" + ep1.attrs = ["a", 
"b"] + ep2 = Mock(name="ep2") + ep2.name = "ep2" + ep2.module_name = "bar" + ep2.attrs = ["c", "d"] iter_entry_points.return_value = [ - ep1, ep2, + ep1, + ep2, ] yield def test_load_extension_class_names__no_pkg_resources(): - with mask_module('pkg_resources'): - assert list(load_extension_class_names('foo')) == [] + with mask_module("pkg_resources"): + assert list(load_extension_class_names("foo")) == [] def test_cwd_in_path(): - with patch('os.getcwd') as getcwd: - getcwd.return_value = 'bar' - with patch('sys.path', ['foo', 'moo', 'baz']): + with patch("os.getcwd") as getcwd: + getcwd.return_value = "bar" + with patch("sys.path", ["foo", "moo", "baz"]): with cwd_in_path(): - assert 'bar' in sys.path - assert 'bar' not in sys.path + assert "bar" in sys.path + assert "bar" not in sys.path def test_cwd_in_path__already_in_path(): - with patch('os.getcwd') as getcwd: - getcwd.return_value = 'bar' - with patch('sys.path', ['foo', 'bar', 'baz']): + with patch("os.getcwd") as getcwd: + getcwd.return_value = "bar" + with patch("sys.path", ["foo", "bar", "baz"]): with cwd_in_path(): - assert 'bar' in sys.path - assert 'bar' in sys.path + assert "bar" in sys.path + assert "bar" in sys.path def test_import_from_cwd(): - with patch('mode.utils.imports.cwd_in_path'): - with patch('importlib.import_module') as import_module: - res = import_from_cwd('.foo', package='baz') + with patch("mode.utils.imports.cwd_in_path"): + with patch("importlib.import_module") as import_module: + res = import_from_cwd(".foo", package="baz") assert res is import_module.return_value - import_module.assert_called_once_with('.foo', package='baz') + import_module.assert_called_once_with(".foo", package="baz") def test_import_from_cwd__custom_imp(): - imp = Mock(name='imp') - with patch('importlib.import_module'): - res = import_from_cwd('.foo', package='baz', imp=imp) + imp = Mock(name="imp") + with patch("importlib.import_module"): + res = import_from_cwd(".foo", package="baz", imp=imp) assert res 
is imp.return_value - imp.assert_called_once_with('.foo', package='baz') + imp.assert_called_once_with(".foo", package="baz") diff --git a/t/unit/utils/test_logging.py b/t/unit/utils/test_logging.py index 39682c4b..745ccd17 100644 --- a/t/unit/utils/test_logging.py +++ b/t/unit/utils/test_logging.py @@ -2,10 +2,10 @@ import io import logging import sys -import pytest - from copy import deepcopy +import pytest + from mode.utils.logging import ( HAS_STACKLEVEL, CompositeLogger, @@ -32,91 +32,93 @@ def test__logger_config(): - assert _logger_config([1, 2], level='WARNING') == { - 'handlers': [1, 2], - 'level': 'WARNING', + assert _logger_config([1, 2], level="WARNING") == { + "handlers": [1, 2], + "level": "WARNING", } def log_called_with(logger, *args, stacklevel, **kwargs): if HAS_STACKLEVEL: - logger.log.assert_called_once_with( - *args, stacklevel=stacklevel, **kwargs) + logger.log.assert_called_once_with(*args, stacklevel=stacklevel, **kwargs) else: - logger.log.assert_called_once_with( *args, **kwargs) + logger.log.assert_called_once_with(*args, **kwargs) def formatter_called_with(formatter, *args, stacklevel, **kwargs): if HAS_STACKLEVEL: - formatter.assert_called_once_with( - *args, stacklevel=stacklevel, **kwargs) + formatter.assert_called_once_with(*args, stacklevel=stacklevel, **kwargs) else: - formatter.assert_called_once_with( *args, **kwargs) + formatter.assert_called_once_with(*args, **kwargs) -class test_CompositeLogger: +class test_CompositeLogger: @pytest.fixture() def logger(self): - return Mock(name='logger') + return Mock(name="logger") @pytest.fixture() def formatter(self): - return Mock(name='formatter') + return Mock(name="formatter") @pytest.fixture() def log(self, *, logger, formatter): return CompositeLogger(logger=logger, formatter=formatter) def test_log(self, *, log, logger, formatter): - log.log(logging.INFO, 'msg', 1, kw=2) + log.log(logging.INFO, "msg", 1, kw=2) log_called_with( logger, logging.INFO, formatter.return_value, - 1, 
stacklevel=2, kw=2, + 1, + stacklevel=2, + kw=2, ) - formatter_called_with( - formatter, logging.INFO, 'msg', 1, kw=2, stacklevel=2) + formatter_called_with(formatter, logging.INFO, "msg", 1, kw=2, stacklevel=2) def test_log__no_formatter(self, *, log, logger): log.formatter = None - log.log(logging.INFO, 'msg', 1, kw=2) + log.log(logging.INFO, "msg", 1, kw=2) log_called_with( logger, logging.INFO, - 'msg', - 1, kw=2, stacklevel=2, + "msg", + 1, + kw=2, + stacklevel=2, ) - @pytest.mark.parametrize('method,severity,extra', [ - ('debug', logging.DEBUG, {}), - ('info', logging.INFO, {}), - ('warn', logging.WARN, {}), - ('warning', logging.WARN, {}), - ('error', logging.ERROR, {}), - ('crit', logging.CRITICAL, {}), - ('critical', logging.CRITICAL, {}), - ('exception', logging.ERROR, {'exc_info': 1}), - ]) + @pytest.mark.parametrize( + "method,severity,extra", + [ + ("debug", logging.DEBUG, {}), + ("info", logging.INFO, {}), + ("warn", logging.WARN, {}), + ("warning", logging.WARN, {}), + ("error", logging.ERROR, {}), + ("crit", logging.CRITICAL, {}), + ("critical", logging.CRITICAL, {}), + ("exception", logging.ERROR, {"exc_info": 1}), + ], + ) def test_severity_mixin(self, method, severity, extra, *, log, logger): log.formatter = None - getattr(log, method)('msg', 'arg1', kw1=3, kw2=5) + getattr(log, method)("msg", "arg1", kw1=3, kw2=5) log_called_with( - logger, - severity, 'msg', 'arg1', kw1=3, kw2=5, stacklevel=3, **extra) + logger, severity, "msg", "arg1", kw1=3, kw2=5, stacklevel=3, **extra + ) def test_dev__enabled(self, log): log.log = Mock() - with patch('mode.utils.logging.DEVLOG', True): - log.dev('msg', 1, k=2) - log_called_with( - log, - logging.INFO, 'msg', 1, k=2, stacklevel=3) + with patch("mode.utils.logging.DEVLOG", True): + log.dev("msg", 1, k=2) + log_called_with(log, logging.INFO, "msg", 1, k=2, stacklevel=3) def test_dev__disabled(self, log): log.info = Mock() - with patch('mode.utils.logging.DEVLOG', False): - log.dev('msg', 1, k=2) + with 
patch("mode.utils.logging.DEVLOG", False): + log.dev("msg", 1, k=2) log.info.assert_not_called() @@ -131,44 +133,53 @@ def test_formatter(): def test_DefaultFormatter(): record = logging.LogRecord( - 'name', logging.INFO, 'path', 303, 'msg', - {'foo': 1, 'extra': {'data': {'moo': 30, 'baz': [1, 2]}}}, + "name", + logging.INFO, + "path", + 303, + "msg", + {"foo": 1, "extra": {"data": {"moo": 30, "baz": [1, 2]}}}, exc_info=None, ) DefaultFormatter().format(record) -@pytest.mark.parametrize('input,expected', [ - ('DEBUG', logging.DEBUG), - ('INFO', logging.INFO), - ('WARNING', logging.WARNING), - ('WARNING', logging.WARNING), - ('ERROR', logging.ERROR), - ('CRITICAL', logging.CRITICAL), - (logging.ERROR, logging.ERROR), -]) +@pytest.mark.parametrize( + "input,expected", + [ + ("DEBUG", logging.DEBUG), + ("INFO", logging.INFO), + ("WARNING", logging.WARNING), + ("WARNING", logging.WARNING), + ("ERROR", logging.ERROR), + ("CRITICAL", logging.CRITICAL), + (logging.ERROR, logging.ERROR), + ], +) def test_level_number(input, expected): assert level_number(input) == expected -@pytest.mark.parametrize('input,expected', [ - (logging.DEBUG, 'DEBUG'), - (logging.INFO, 'INFO'), - (logging.WARN, 'WARNING'), - (logging.WARNING, 'WARNING'), - (logging.ERROR, 'ERROR'), - (logging.CRITICAL, 'CRITICAL'), - ('INFO', 'INFO'), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (logging.DEBUG, "DEBUG"), + (logging.INFO, "INFO"), + (logging.WARN, "WARNING"), + (logging.WARNING, "WARNING"), + (logging.ERROR, "ERROR"), + (logging.CRITICAL, "CRITICAL"), + ("INFO", "INFO"), + ], +) def test_level_name(input, expected): assert level_name(input) == expected class test_setup_logging: - def test_default(self): - with patch('mode.utils.logging._setup_logging') as _sl: - setup_logging(loglevel='INFO', logfile=None) + with patch("mode.utils.logging._setup_logging") as _sl: + setup_logging(loglevel="INFO", logfile=None) _sl.assert_called_once_with( level=logging.INFO, @@ -179,12 +190,12 @@ def 
test_default(self): ) def test_logfile(self): - with patch('mode.utils.logging._setup_logging') as _sl: - setup_logging(loglevel='INFO', logfile='foo.txt') + with patch("mode.utils.logging._setup_logging") as _sl: + setup_logging(loglevel="INFO", logfile="foo.txt") _sl.assert_called_once_with( level=logging.INFO, - filename='foo.txt', + filename="foo.txt", stream=None, loghandlers=None, logging_config=None, @@ -192,8 +203,8 @@ def test_logfile(self): def test_io(self): logfile = Mock() - with patch('mode.utils.logging._setup_logging') as _sl: - setup_logging(loglevel='INFO', logfile=logfile) + with patch("mode.utils.logging._setup_logging") as _sl: + setup_logging(loglevel="INFO", logfile=logfile) _sl.assert_called_once_with( level=logging.INFO, @@ -206,8 +217,8 @@ def test_io(self): def test_io_no_tty(self): logfile = Mock() logfile.isatty.side_effect = AttributeError() - with patch('mode.utils.logging._setup_logging') as _sl: - setup_logging(loglevel='INFO', logfile=logfile) + with patch("mode.utils.logging._setup_logging") as _sl: + setup_logging(loglevel="INFO", logfile=logfile) _sl.assert_called_once_with( level=logging.INFO, @@ -219,14 +230,12 @@ def test_io_no_tty(self): class test__setup_logging: - def setup_method(self, method): - self.extension_formatter_patch = patch( - 'mode.utils.logging.ExtensionFormatter') + self.extension_formatter_patch = patch("mode.utils.logging.ExtensionFormatter") self.extension_formatter = self.extension_formatter_patch.start() - self.colorlog_patch = patch('mode.utils.logging.colorlog') + self.colorlog_patch = patch("mode.utils.logging.colorlog") self.colorlog = self.colorlog_patch.start() - self.logging_patch = patch('mode.utils.logging.logging') + self.logging_patch = patch("mode.utils.logging.logging") self.logging = self.logging_patch.start() def teardown_method(self): @@ -240,10 +249,10 @@ def test_get_logger(self): def test_setup_logging_helper_both_filename_and_stream(self): with pytest.raises(AssertionError): - 
_setup_logging(filename='TEMP', stream=Mock()) + _setup_logging(filename="TEMP", stream=Mock()) def test_setup_logging_helper_with_filename(self): - _setup_logging(filename='TEMP') + _setup_logging(filename="TEMP") self.logging.config.dictConfig.assert_called_once_with(ANY) def test_setup_logging_helper_with_stream_no_handlers(self): @@ -258,59 +267,57 @@ def test_setup_logging_helper_with_stream(self): loghandlers=[mock_handler], ) self.logging.config.dictConfig.assert_called_once_with(ANY) - self.logging.root.handlers.extend.assert_called_once_with( - [mock_handler]) + self.logging.root.handlers.extend.assert_called_once_with([mock_handler]) def test_setup_logging_helper_with_merge_config(self): _setup_logging( filename=None, stream=Mock(), - logging_config={'merge': True, 'foo': 1}, + logging_config={"merge": True, "foo": 1}, ) self.logging.config.dictConfig.assert_called_once_with(ANY) def test_setup_logging_helper_no_merge_config(self): _setup_logging( - logging_config={'merge': False, 'foo': 1}, + logging_config={"merge": False, "foo": 1}, ) self.logging.config.dictConfig.assert_called_once_with(ANY) class test_Logwrapped: - @pytest.fixture() def obj(self): - return Mock(name='obj') + return Mock(name="obj") @pytest.fixture() def logger(self): - return Mock(name='logger') + return Mock(name="logger") @pytest.fixture() def wrapped(self, *, obj, logger): - return Logwrapped(obj, logger, severity='INFO', ident='ident') + return Logwrapped(obj, logger, severity="INFO", ident="ident") def test_constructor(self, wrapped): assert wrapped.severity == logging.INFO def test_wrapper(self, wrapped, obj): - obj.calculate.__name__ = 'calculate' + obj.calculate.__name__ = "calculate" wrapped.calculate(1, 2, kw=1) obj.calculate.assert_called_once_with(1, 2, kw=1) def test_wrapper__no_ident(self, wrapped, obj): wrapped.ident = None - obj.calculate.__name__ = 'calculate' + obj.calculate.__name__ = "calculate" wrapped.calculate(1, 2, kw=1) 
obj.calculate.assert_called_once_with(1, 2, kw=1) def test_wrapper__no_args(self, wrapped, obj): - obj.calculate.__name__ = 'calculate' + obj.calculate.__name__ = "calculate" wrapped.calculate() obj.calculate.assert_called_once_with() def test_wrapper__only_kwargs(self, wrapped, obj): - obj.calculate.__name__ = 'calculate' + obj.calculate.__name__ = "calculate" wrapped.calculate(kw=3) obj.calculate.assert_called_once_with(kw=3) @@ -332,7 +339,7 @@ def test_print_task_name(): out = io.StringIO() task = Mock() task.__wrapped__ = Mock() - task._coro.__name__ = 'foo' + task._coro.__name__ = "foo" print_task_name(task, file=out) assert out.getvalue() @@ -343,10 +350,9 @@ def test_print_task_name(): class test_flight_recorder: - @pytest.fixture() def logger(self): - return Mock(name='logger') + return Mock(name="logger") @pytest.fixture() def bb(self, *, logger): @@ -378,7 +384,7 @@ def test_wrap_error(self, bb): def test_wrap(self, bb): obj = Mock() - with patch('mode.utils.logging.Logwrapped') as Logwrapped: + with patch("mode.utils.logging.Logwrapped") as Logwrapped: ret = bb.wrap(logging.ERROR, obj) assert ret is Logwrapped.return_value Logwrapped.assert_called_once_with( @@ -390,13 +396,14 @@ def test_wrap(self, bb): def test_activate(self, bb): bb._fut = None bb._waiting = Mock() - with patch('mode.utils.logging.current_task') as current_task: - with patch('asyncio.ensure_future') as ensure_future: + with patch("mode.utils.logging.current_task") as current_task: + with patch("asyncio.ensure_future") as ensure_future: bb.activate() assert bb.started_at_date assert bb.enabled_by is current_task.return_value ensure_future.assert_called_once_with( - bb._waiting.return_value, loop=bb.loop, + bb._waiting.return_value, + loop=bb.loop, ) assert bb._fut is ensure_future.return_value @@ -420,38 +427,43 @@ def test_cancel(self, bb): def test_log__active(self, bb, logger): bb._fut = Mock() bb._buffer_log = Mock() - bb.log(logging.DEBUG, 'msg %r %(foo)s', 1, foo='bar') + 
bb.log(logging.DEBUG, "msg %r %(foo)s", 1, foo="bar") bb._buffer_log.assert_called_once_with( - logging.DEBUG, 'msg %r %(foo)s', (1,), {'foo': 'bar'}, + logging.DEBUG, + "msg %r %(foo)s", + (1,), + {"foo": "bar"}, ) def test_log__inactive(self, bb, logger): bb._fut = None bb._buffer_log = Mock() - bb.log(logging.DEBUG, 'msg %r %(foo)s', 1, foo='bar') + bb.log(logging.DEBUG, "msg %r %(foo)s", 1, foo="bar") log_called_with( logger, - logging.DEBUG, 'msg %r %(foo)s', 1, foo='bar', stacklevel=2, + logging.DEBUG, + "msg %r %(foo)s", + 1, + foo="bar", + stacklevel=2, ) def test__buffer_log(self, bb): - with patch('mode.utils.logging.asctime') as asctime: - bb._buffer_log( - logging.ERROR, 'msg %r %(foo)s', (1,), {'foo': 'bar'}) + with patch("mode.utils.logging.asctime") as asctime: + bb._buffer_log(logging.ERROR, "msg %r %(foo)s", (1,), {"foo": "bar"}) assert bb._logs[-1] == LogMessage( logging.ERROR, - 'msg %r %(foo)s', + "msg %r %(foo)s", asctime(), (1,), - {'foo': 'bar'}, + {"foo": "bar"}, ) @pytest.mark.asyncio async def test__waiting__cancelled(self, bb): assert not bb._logs - bb._buffer_log( - logging.ERROR, 'msg %r %(foo)s', (1,), {'foo': 'bar'}) - with patch('asyncio.sleep', AsyncMock()) as sleep: + bb._buffer_log(logging.ERROR, "msg %r %(foo)s", (1,), {"foo": "bar"}) + with patch("asyncio.sleep", AsyncMock()) as sleep: sleep.coro.side_effect = asyncio.CancelledError() await bb._waiting() sleep.assert_called_once_with(bb.timeout) @@ -460,24 +472,23 @@ async def test__waiting__cancelled(self, bb): @pytest.mark.asyncio async def test__waiting__has_logs(self, bb): assert not bb._logs - bb._buffer_log( - logging.ERROR, 'msg %r %(foo)s', (1,), {'foo': 'bar'}) + bb._buffer_log(logging.ERROR, "msg %r %(foo)s", (1,), {"foo": "bar"}) assert bb._logs - with patch('asyncio.sleep', AsyncMock()): + with patch("asyncio.sleep", AsyncMock()): await bb._waiting() @pytest.mark.asyncio async def test__waiting__no_logs(self, bb): assert not bb._logs - with patch('asyncio.sleep', 
AsyncMock()): + with patch("asyncio.sleep", AsyncMock()): await bb._waiting() @pytest.mark.asyncio async def test__waiting__enabled_by(self, bb): assert not bb._logs bb.enabled_by = Mock() - with patch('asyncio.sleep', AsyncMock()): - with patch('mode.utils.logging.format_task_stack') as fts: + with patch("asyncio.sleep", AsyncMock()): + with patch("mode.utils.logging.format_task_stack") as fts: await bb._waiting() fts.assert_called_once_with(bb.enabled_by) @@ -485,7 +496,7 @@ async def test__waiting__enabled_by(self, bb): async def test__waiting__raises(self, bb): assert not bb._logs bb.logger.warning = Mock(side_effect=KeyError()) - with patch('asyncio.sleep', AsyncMock()): + with patch("asyncio.sleep", AsyncMock()): with pytest.raises(KeyError): await bb._waiting() @@ -501,32 +512,31 @@ def test_context(self, bb): class test_FileLogProxy: - def test_constructor__defaults(self): - logger = get_logger('foo') + logger = get_logger("foo") logger.level = None assert logger.level is None f = FileLogProxy(logger) assert f.severity == logging.WARN def test_constructor__severity_from_logger(self): - logger = get_logger('foo') + logger = get_logger("foo") logger.level = logging.DEBUG f = FileLogProxy(logger) assert f.severity == logging.DEBUG def test_constructor__explicit_severity(self): - logger = get_logger('foo') + logger = get_logger("foo") logger.level = logging.DEBUG f = FileLogProxy(logger, severity=logging.ERROR) assert f.severity == logging.ERROR def test__safewrap_handler(self): - f = FileLogProxy(get_logger('foo')) + f = FileLogProxy(get_logger("foo")) handler = Mock() f._safewrap_handler(handler) - with patch('traceback.print_exc') as print_exc: + with patch("traceback.print_exc") as print_exc: record = Mock() handler.handleError(record) print_exc.assert_called_once_with(None, sys.__stderr__) @@ -537,32 +547,34 @@ def test_write(self): logger = Mock(handlers=[]) f = FileLogProxy(logger) f._threadlocal.recurse_protection = True - f.write('foo') + 
f.write("foo") logger.log.assert_not_called() f._threadlocal.recurse_protection = False - f.write('') - f.write(' ') + f.write("") + f.write(" ") f.close() - f.write('msg') + f.write("msg") logger.log.assert_not_called() f._closed = False - f.write(' msg ') - logger.log.assert_called_once_with(f.severity, 'msg') - - f.writelines(['foo', 'bar']) - logger.log.assert_has_calls([ - call(f.severity, 'msg'), - call(f.severity, 'foo'), - call(f.severity, 'bar'), - ]) + f.write(" msg ") + logger.log.assert_called_once_with(f.severity, "msg") + + f.writelines(["foo", "bar"]) + logger.log.assert_has_calls( + [ + call(f.severity, "msg"), + call(f.severity, "foo"), + call(f.severity, "bar"), + ] + ) def test_flush(self): - FileLogProxy(get_logger('foo')).flush() + FileLogProxy(get_logger("foo")).flush() def test_isatty(self): - assert not FileLogProxy(get_logger('foo')).isatty() + assert not FileLogProxy(get_logger("foo")).isatty() def test_redirect_stdouts(): @@ -589,10 +601,13 @@ def test_redirect_stdouts(): @pytest.mark.asyncio -@pytest.mark.parametrize('extra_context', [ - {}, - {'foo': 'bar'}, -]) +@pytest.mark.parametrize( + "extra_context", + [ + {}, + {"foo": "bar"}, + ], +) async def test_on_timeout(extra_context): logger = Mock() assert isinstance(on_timeout, _FlightRecorderProxy) @@ -600,15 +615,15 @@ async def test_on_timeout(extra_context): # Test no errors when there's no active flight recorder _assert_log_severities(on_timeout) - with patch('mode.utils.logging.asctime') as asctime: - asctime.return_value = 'TIME' + with patch("mode.utils.logging.asctime") as asctime: + asctime.return_value = "TIME" # Test logging to active flight recorder (with nesting) with flight_recorder(logger, timeout=300) as fl1: fl1.extra_context.update(extra_context) assert current_flight_recorder() is fl1 _assert_recorder_exercised(on_timeout, fl1) - with flight_recorder(logger, timeout=30)as fl2: + with flight_recorder(logger, timeout=30) as fl2: for k, v in fl1.extra_context.items(): 
assert fl2.extra_context[k] == v assert current_flight_recorder() is fl2 @@ -622,35 +637,29 @@ async def test_on_timeout(extra_context): def _assert_log_severities(logger): - logger.debug('DEBUG %d %(a)s', 1, a='A') - logger.info('INFO %d %(b)s', 2, b='B') - logger.warning('WARNING %d %(c)s', 3, c='C') - logger.error('ERROR %d %(d)s', 4, d='D') - logger.critical('CRITICAL %d %(e)s', 5, e='E') + logger.debug("DEBUG %d %(a)s", 1, a="A") + logger.info("INFO %d %(b)s", 2, b="B") + logger.warning("WARNING %d %(c)s", 3, c="C") + logger.error("ERROR %d %(d)s", 4, d="D") + logger.critical("CRITICAL %d %(e)s", 5, e="E") def _log_kwargs(kwargs): if HAS_STACKLEVEL: - kwargs.setdefault('stacklevel', 3) + kwargs.setdefault("stacklevel", 3) return kwargs EXPECTED_LOG_MESSAGES = [ + LogMessage(logging.DEBUG, "DEBUG %d %(a)s", "TIME", (1,), _log_kwargs({"a": "A"})), + LogMessage(logging.INFO, "INFO %d %(b)s", "TIME", (2,), _log_kwargs({"b": "B"})), LogMessage( - logging.DEBUG, - 'DEBUG %d %(a)s', 'TIME', (1,), _log_kwargs({'a': 'A'})), - LogMessage( - logging.INFO, - 'INFO %d %(b)s', 'TIME', (2,), _log_kwargs({'b': 'B'})), - LogMessage( - logging.WARNING, - 'WARNING %d %(c)s', 'TIME', (3,), _log_kwargs({'c': 'C'})), - LogMessage( - logging.ERROR, - 'ERROR %d %(d)s', 'TIME', (4,), _log_kwargs({'d': 'D'})), + logging.WARNING, "WARNING %d %(c)s", "TIME", (3,), _log_kwargs({"c": "C"}) + ), + LogMessage(logging.ERROR, "ERROR %d %(d)s", "TIME", (4,), _log_kwargs({"d": "D"})), LogMessage( - logging.CRITICAL, - 'CRITICAL %d %(e)s', 'TIME', (5,), _log_kwargs({'e': 'E'})), + logging.CRITICAL, "CRITICAL %d %(e)s", "TIME", (5,), _log_kwargs({"e": "E"}) + ), ] @@ -660,15 +669,15 @@ def _assert_recorder_exercised(logger, fl): def _assert_recorder_flush_logs(logger, fl): - fl.flush_logs(ident='IDENT') + fl.flush_logs(ident="IDENT") def _get_call(sev, msg, datestr, args, kwargs): kw = deepcopy(kwargs) if fl.extra_context: - extra = kw.setdefault('extra', {}) - data = extra.setdefault('data', {}) 
+ extra = kw.setdefault("extra", {}) + data = extra.setdefault("data", {}) data.update(fl.extra_context) - return call(sev, f'[%s] (%s) {msg}', 'IDENT', datestr, *args, **kw) + return call(sev, f"[%s] (%s) {msg}", "IDENT", datestr, *args, **kw) logger.log.assert_has_calls( _get_call(sev, msg, datestr, args, kwargs) diff --git a/t/unit/utils/test_objects.py b/t/unit/utils/test_objects.py index d1ae3cd8..20c973d5 100644 --- a/t/unit/utils/test_objects.py +++ b/t/unit/utils/test_objects.py @@ -1,9 +1,8 @@ import abc -import sys +import collections.abc import pickle +import sys import typing -import collections.abc - from typing import ( AbstractSet, ClassVar, @@ -22,8 +21,10 @@ ) import pytest + from mode import Service, ServiceT from mode.services import ServiceBase, ServiceCallbacks +from mode.utils.mocks import ANY, IN, Mock from mode.utils.objects import ( ForwardRef, InvalidAnnotation, @@ -45,7 +46,6 @@ remove_optional, shortname, ) -from mode.utils.mocks import ANY, IN, Mock PY37 = sys.version_info >= (3, 7) @@ -72,32 +72,33 @@ class A(B): ... 
-@pytest.mark.parametrize('cls,stop,expected_mro', [ - (A, Service, [D, C, B, A]), - (B, Service, [D, C, B]), - (C, Service, [D, C]), - (D, Service, [D]), - (A, - object, - ([ServiceCallbacks, Generic] + - EXTRA_GENERIC_INHERITS_FROM + - [ANY, - ServiceT, - ServiceBase, - Service, - D, C, B, A])), - (A, B, [A]), - (A, C, [B, A]), - (A, D, [C, B, A]), -]) +@pytest.mark.parametrize( + "cls,stop,expected_mro", + [ + (A, Service, [D, C, B, A]), + (B, Service, [D, C, B]), + (C, Service, [D, C]), + (D, Service, [D]), + ( + A, + object, + ( + [ServiceCallbacks, Generic] + + EXTRA_GENERIC_INHERITS_FROM + + [ANY, ServiceT, ServiceBase, Service, D, C, B, A] + ), + ), + (A, B, [A]), + (A, C, [B, A]), + (A, D, [C, B, A]), + ], +) def test_iter_mro_reversed(cls, stop, expected_mro): assert list(iter_mro_reversed(cls, stop=stop)) == expected_mro class test_cached_property: - class X(object): - @cached_property def foo(self): return 42 @@ -139,10 +140,10 @@ def x_deleter(self): return self.X_deleter() def test_get(self, x): - assert 'foo' not in x.__dict__ + assert "foo" not in x.__dict__ assert not type(x).foo.is_set(x) assert x.foo == 42 - assert x.__dict__['foo'] == 42 + assert x.__dict__["foo"] == 42 assert type(x).foo.is_set(x) assert x.foo == 42 @@ -156,7 +157,7 @@ def test_set(self, x): assert x.foo == 42 x.foo = 303 assert x.foo == 303 - assert x.__dict__['foo'] == 303 + assert x.__dict__["foo"] == 303 def test_set_setter(self, x_setter): assert x_setter.foo == 1 @@ -165,11 +166,11 @@ def test_set_setter(self, x_setter): assert x_setter._foo == 2 def test_del(self, x): - assert 'foo' not in x.__dict__ + assert "foo" not in x.__dict__ assert x.foo == 42 - assert 'foo' in x.__dict__ + assert "foo" in x.__dict__ del x.foo - assert 'foo' not in x.__dict__ + assert "foo" not in x.__dict__ def test_del_deleter(self, x_deleter): del x_deleter.foo @@ -179,15 +180,15 @@ def test_del_deleter(self, x_deleter): assert x_deleter._foo is None def test_get__class_attribute(self): - 
class X: - foo = 'quick brown fox' + foo = "quick brown fox" def _get_bar(self): return 42 - bar = cached_property(_get_bar, class_attribute='foo') - assert X.bar == 'quick brown fox' + bar = cached_property(_get_bar, class_attribute="foo") + + assert X.bar == "quick brown fox" assert X().bar == 42 @@ -195,115 +196,121 @@ def test_Unordered(): assert Unordered(1) < Unordered(10) x = set() - x.add(Unordered({'foo': 'bar'})) - x.add(Unordered({'foo': 'bar'})) + x.add(Unordered({"foo": "bar"})) + x.add(Unordered({"foo": "bar"})) assert len(x) == 2 assert repr(x) def test__restore_from_keywords(): m = Mock() - _restore_from_keywords(m, {'foo': 1, 'bar': 20}) + _restore_from_keywords(m, {"foo": 1, "bar": 20}) m.assert_called_once_with(foo=1, bar=20) class X(KeywordReduce): - def __init__(self, name, age): self.name = name self.age = age def __reduce_keywords__(self): - return {'name': self.name, 'age': self.age} + return {"name": self.name, "age": self.age} def test_KeywordReduce(): with pytest.raises(NotImplementedError): KeywordReduce().__reduce_keywords__() - x = X('foo', 10) + x = X("foo", 10) y = pickle.loads(pickle.dumps(x)) assert y.name == x.name assert y.age == x.age def test_qualname_object(): - class X: ... - assert qualname('foo') == 'builtins.str' - assert qualname(str) == 'builtins.str' + assert qualname("foo") == "builtins.str" + assert qualname(str) == "builtins.str" - assert qualname(X).endswith('test_qualname_object..X') - assert qualname(X()).endswith('test_qualname_object..X') + assert qualname(X).endswith("test_qualname_object..X") + assert qualname(X()).endswith("test_qualname_object..X") def test_shortname_object(): - class X: ... 
- assert shortname('foo') == 'builtins.str' - assert shortname(str) == 'builtins.str' + assert shortname("foo") == "builtins.str" + assert shortname(str) == "builtins.str" - assert shortname(X) == __name__ + '.X' - assert shortname(X()) == __name__ + '.X' + assert shortname(X) == __name__ + ".X" + assert shortname(X()) == __name__ + ".X" def test_canoname(): - class X: ... - X.__module__ = '__main__' + + X.__module__ = "__main__" x = X() class Y: ... + y = Y() - assert canoname(X, main_name='faust') == 'faust.test_canoname..X' - assert canoname(x, main_name='faust') == 'faust.test_canoname..X' - assert canoname(Y, main_name='faust') == '.'.join([ - __name__, - 'test_canoname..Y', - ]) - assert canoname(y, main_name='faust') == '.'.join([ - __name__, - 'test_canoname..Y', - ]) + assert canoname(X, main_name="faust") == "faust.test_canoname..X" + assert canoname(x, main_name="faust") == "faust.test_canoname..X" + assert canoname(Y, main_name="faust") == ".".join( + [ + __name__, + "test_canoname..Y", + ] + ) + assert canoname(y, main_name="faust") == ".".join( + [ + __name__, + "test_canoname..Y", + ] + ) def test_canonshortname(): - class X: ... - X.__module__ = '__main__' + + X.__module__ = "__main__" x = X() class Y: ... 
+ y = Y() - assert canonshortname(X, main_name='faust') == 'faust.X' - assert canonshortname(x, main_name='faust') == 'faust.X' - assert canonshortname(Y, main_name='faust') == '.'.join([ - __name__, - 'Y', - ]) - assert canonshortname(y, main_name='faust') == '.'.join([ - __name__, - 'Y', - ]) + assert canonshortname(X, main_name="faust") == "faust.X" + assert canonshortname(x, main_name="faust") == "faust.X" + assert canonshortname(Y, main_name="faust") == ".".join( + [ + __name__, + "Y", + ] + ) + assert canonshortname(y, main_name="faust") == ".".join( + [ + __name__, + "Y", + ] + ) def test_annotations(): - class X: Foo: ClassVar[int] = 3 - foo: 'int' - bar: List['X'] - baz: Union[List['X'], str] + foo: "int" + bar: List["X"] + baz: Union[List["X"], str] mas: int = 3 fields, defaults = annotations( @@ -313,22 +320,21 @@ class X: ) assert fields == { - 'Foo': ClassVar[int], - 'foo': int, - 'bar': List[X], - 'baz': Union[List[X], str], - 'mas': int, + "Foo": ClassVar[int], + "foo": int, + "bar": List[X], + "baz": Union[List[X], str], + "mas": int, } - assert defaults['mas'] == 3 + assert defaults["mas"] == 3 def test_annotations__skip_classvar(): - class X: Foo: ClassVar[int] = 3 - foo: 'int' - bar: List['X'] - baz: Union[List['X'], str] + foo: "int" + bar: List["X"] + baz: Union[List["X"], str] mas: int = 3 fields, defaults = annotations( @@ -339,16 +345,15 @@ class X: ) assert fields == { - 'foo': int, - 'bar': List[X], - 'baz': Union[List[X], str], - 'mas': int, + "foo": int, + "bar": List[X], + "baz": Union[List[X], str], + "mas": int, } - assert defaults['mas'] == 3 + assert defaults["mas"] == 3 def test_annotations__invalid_type(): - class X: foo: List @@ -363,12 +368,11 @@ class X: def test_annotations__no_local_ns_raises(): - class Bar: ... 
class X: - bar: 'Bar' + bar: "Bar" with pytest.raises(NameError): annotations( @@ -379,15 +383,15 @@ class X: def test__ForwardRef_safe_eval(): - ref1 = ForwardRef('int') + ref1 = ForwardRef("int") assert _ForwardRef_safe_eval(ref1) == int assert _ForwardRef_safe_eval(ref1) == int assert ref1.__forward_evaluated__ assert ref1.__forward_value__ == int + assert _ForwardRef_safe_eval(ForwardRef("foo"), localns={"foo": str}) == str assert _ForwardRef_safe_eval( - ForwardRef('foo'), localns={'foo': str}) == str - assert _ForwardRef_safe_eval( - ForwardRef('ClassVar[int]'), globalns=globals(), localns=locals()) + ForwardRef("ClassVar[int]"), globalns=globals(), localns=locals() + ) # Union[type(None)] actually returns None @@ -396,46 +400,58 @@ def test__ForwardRef_safe_eval(): WeirdNoneUnion.__args__ = (type(None), type(None)) -@pytest.mark.parametrize('input,expected', [ - (Optional[str], str), - (Union[str, None], str), - (Union[str, type(None)], str), - (Union[str, None], str), - (Optional[List[str]], List[str]), - (Optional[Mapping[int, str]], Mapping[int, str]), - (Optional[AbstractSet[int]], AbstractSet[int]), - (Optional[Set[int]], Set[int]), - (Optional[Tuple[int, ...]], Tuple[int, ...]), - (Optional[Dict[int, str]], Dict[int, str]), - (Optional[List[int]], List[int]), - (str, str), - (List[str], List[str]), - (Union[str, int, float], Union[str, int, float]), - (WeirdNoneUnion, WeirdNoneUnion), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (Optional[str], str), + (Union[str, None], str), + (Union[str, type(None)], str), + (Union[str, None], str), + (Optional[List[str]], List[str]), + (Optional[Mapping[int, str]], Mapping[int, str]), + (Optional[AbstractSet[int]], AbstractSet[int]), + (Optional[Set[int]], Set[int]), + (Optional[Tuple[int, ...]], Tuple[int, ...]), + (Optional[Dict[int, str]], Dict[int, str]), + (Optional[List[int]], List[int]), + (str, str), + (List[str], List[str]), + (Union[str, int, float], Union[str, int, float]), + (WeirdNoneUnion, 
WeirdNoneUnion), + ], +) def test_remove_optional(input, expected): assert remove_optional(input) == expected -@pytest.mark.parametrize('input,expected', [ - (Optional[str], ((), str)), - (Union[str, None], ((), str)), - (Union[str, type(None)], ((), str)), - (Union[str, None], ((), str)), - (Optional[List[str]], ((str,), list)), - (Optional[Mapping[int, str]], - ((int, str), IN(dict, collections.abc.Mapping, typing.Mapping))), - (Optional[AbstractSet[int]], ((int,), - IN(set, collections.abc.Set, typing.AbstractSet))), - (Optional[Set[int]], ((int,), - IN(set, collections.abc.Set, typing.AbstractSet))), - (Optional[Tuple[int, ...]], ((int, ...), IN(tuple, typing.Tuple))), - (Optional[Dict[int, str]], ((int, str), dict)), - (Optional[List[int]], ((int,), list)), - (str, ((), str)), - (List[str], ((str,), list)), - (WeirdNoneUnion, ((type(None), type(None)), Union)), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (Optional[str], ((), str)), + (Union[str, None], ((), str)), + (Union[str, type(None)], ((), str)), + (Union[str, None], ((), str)), + (Optional[List[str]], ((str,), list)), + ( + Optional[Mapping[int, str]], + ((int, str), IN(dict, collections.abc.Mapping, typing.Mapping)), + ), + ( + Optional[AbstractSet[int]], + ((int,), IN(set, collections.abc.Set, typing.AbstractSet)), + ), + ( + Optional[Set[int]], + ((int,), IN(set, collections.abc.Set, typing.AbstractSet)), + ), + (Optional[Tuple[int, ...]], ((int, ...), IN(tuple, typing.Tuple))), + (Optional[Dict[int, str]], ((int, str), dict)), + (Optional[List[int]], ((int,), list)), + (str, ((), str)), + (List[str], ((str,), list)), + (WeirdNoneUnion, ((type(None), type(None)), Union)), + ], +) def test__remove_optional__find_origin(input, expected): assert _remove_optional(input, find_origin=True) == expected @@ -449,31 +465,37 @@ def test__remove_optional_edgecase(): assert res[1] is typing.Union -@pytest.mark.parametrize('input,expected', [ - (Optional[str], True), - (Union[str, None], True), - 
(Union[str, type(None)], True), - (Union[str, None], True), - (str, False), - (List[str], False), - (Union[str, int, float], False), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (Optional[str], True), + (Union[str, None], True), + (Union[str, type(None)], True), + (Union[str, None], True), + (str, False), + (List[str], False), + (Union[str, int, float], False), + ], +) def test_is_optional(input, expected): assert is_optional(input) == expected -@pytest.mark.parametrize('input,expected', [ - (Tuple[int, ...], (tuple, int)), - (List[int], (list, int)), - (Mapping[str, int], (dict, int)), - (Dict[str, int], (dict, int)), - (MutableMapping[str, int], (dict, int)), - (Set[str], (set, str)), - (FrozenSet[str], (set, str)), - (MutableSet[str], (set, str)), - (AbstractSet[str], (set, str)), - (Sequence[str], (list, str)), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (Tuple[int, ...], (tuple, int)), + (List[int], (list, int)), + (Mapping[str, int], (dict, int)), + (Dict[str, int], (dict, int)), + (MutableMapping[str, int], (dict, int)), + (Set[str], (set, str)), + (FrozenSet[str], (set, str)), + (MutableSet[str], (set, str)), + (AbstractSet[str], (set, str)), + (Sequence[str], (list, str)), + ], +) def test_guess_polymorphic_type(input, expected): assert guess_polymorphic_type(input) == expected assert guess_polymorphic_type(Optional[input]) == expected @@ -481,7 +503,6 @@ def test_guess_polymorphic_type(input, expected): def test_guess_polymorphic_type__not_generic(): - class X: ... 
@@ -494,15 +515,18 @@ class X: def test_label_pass(): - s = 'foo' + s = "foo" assert label(s) is s -@pytest.mark.parametrize('input,expected', [ - (str, False), - (int, False), - (Union[int, bytes], True), - (Optional[str], True), -]) +@pytest.mark.parametrize( + "input,expected", + [ + (str, False), + (int, False), + (Union[int, bytes], True), + (Optional[str], True), + ], +) def test_is_union(input, expected): assert is_union(input) == expected diff --git a/tox.ini b/tox.ini index 24e95efc..e277ac51 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 3.8,3.7,3.6,flake8,apicheck,typecheck,docstyle,bandit +envlist = 3.10,3.9,3.8,3.7,3.6,flake8,apicheck,typecheck,docstyle,bandit [testenv] deps= @@ -12,8 +12,10 @@ deps= bandit: bandit sitepackages = False recreate = False -commands = py.test --random-order --open-files -xvv --cov=mode +commands = py.test --random-order --open-files -xvv --cov=mode --cov-branch basepython = + 3.10: python3.10 + 3.9: python3.9 3.8,flake8,typecheck,apicheck,linkcheck,docstyle,bandit: python3.8 3.7: python3.7 3.6: python3.6