diff --git a/.github/workflows/test_lint_deploy.yml b/.github/workflows/test_lint_deploy.yml index 15b7270e..4e35dabf 100644 --- a/.github/workflows/test_lint_deploy.yml +++ b/.github/workflows/test_lint_deploy.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.8", "3.9", "3.10", "3.11"] + python: ["3.10", "3.11", "3.12", "3.13", "3.14"] steps: - uses: actions/checkout@v3 @@ -42,6 +42,12 @@ jobs: publish: runs-on: ubuntu-latest needs: test + + # See https://docs.pypi.org/trusted-publishers/using-a-publisher/ + environment: release + permissions: + id-token: write + # Only Publish if it's a tagged commit if: >- startsWith(github.ref, 'refs/tags/') @@ -50,10 +56,10 @@ steps: - uses: actions/checkout@v3 - - name: Set up Python 3.8 + - name: Set up Python 3.14 uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: "3.14" - name: Install pypa/build run: >- @@ -70,9 +76,6 @@ jobs: --wheel --outdir dist/ . + - name: Publish distribution 📦 to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - repository_url: https://upload.pypi.org/legacy/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 032fbba1..950dcf7f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,16 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: +- repo: local + hooks: + - id: cog + name: cog + language: python + additional_dependencies: [cogapp] + entry: 'python -m cogapp -cr --verbosity=1 --markers="[[[cog]]] [[[out]]] [[[end]]]"' + files: \.py$ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-added-large-files - id: check-json @@ -11,15 +19,15 @@ repos: - id: fix-byte-order-marker - id: trailing-whitespace - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort args: [--profile, black] - 
repo: https://github.com/psf/black - rev: 23.1.0 + rev: 24.2.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 6.0.0 + rev: 7.0.0 hooks: - id: flake8 diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 5101a10e..44dd2215 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -2,9 +2,9 @@ version: 2 build: - os: ubuntu-22.04 + os: ubuntu-24.04 tools: - python: "3.8" + python: "3.10" sphinx: configuration: docs/source/conf.py diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..7e31e585 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,19 @@ +# AGENTS.md + +Read all developer instructions and contribution guidelines in @README.md. + +## Testing your changes + +When changing code in `pyairtable/`, follow these steps one at a time: + +1. `tox -e mypy` +2. `tox -e py314 -- $CHANGED_FILE $CORRESPONDING_TEST_FILE` +3. `tox -e coverage` +4. `make lint` +5. `make docs` +6. `make test` + +When changing code in `docs/`, follow these steps instead: + +1. `make docs` +2. 
`make lint` diff --git a/MANIFEST.in b/MANIFEST.in index e665cfe9..ecb77817 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,3 +3,4 @@ include *.rst include tox.ini include LICENSE include README.md +exclude pyairtable/formulas.txt diff --git a/Makefile b/Makefile index df2cfaa5..ca48d5ee 100644 --- a/Makefile +++ b/Makefile @@ -10,25 +10,12 @@ hooks: .tox/pre-commit/bin/pre-commit install .tox/pre-commit/bin/pre-commit install-hooks -.PHONY: release release-test bump +.PHONY: release release: - make clean - python -m build --sdist --wheel --outdir ./dist - twine upload ./dist/* + @zsh -c "./scripts/release.sh" -release-test: - make clean - python -m build --sdist --wheel --outdir ./dist - twine upload --repository testpypi ./dist/* - -bump: - @bash -c "./scripts/bump.sh" - -.PHONY: test test-e2e coverage lint format docs clean +.PHONY: test coverage lint format docs clean test: - tox -- -m 'not integration' - -test-e2e: tox coverage: diff --git a/README.md b/README.md index d21db9ed..46b512d1 100644 --- a/README.md +++ b/README.md @@ -16,9 +16,9 @@ pip install pyairtable ## Documentation -Read the full documentation on [pyairtable.readthedocs.io](https://pyairtable.readthedocs.io/en/latest/getting-started.html). +Read the full documentation on [pyairtable.readthedocs.io](https://pyairtable.readthedocs.io/en/stable/getting-started.html). -If you're still using airtable-python-wrapper and want to upgrade, read the [migration guide](https://pyairtable.readthedocs.io/en/latest/migrations.html). +If you're still using airtable-python-wrapper and want to upgrade, read the [migration guide](https://pyairtable.readthedocs.io/en/stable/migrations.html). ## Contributing @@ -29,8 +29,14 @@ Everyone who has an idea or suggestion is welcome to contribute! As maintainers, If it's your first time working on this library, clone the repo, set up pre-commit hooks, and make sure you can run tests (and they pass). 
If that doesn't work out of the box, please check your local development environment before filing an issue. ```sh -% make setup -% make test +make setup +make test +``` + +For a quick test run (~15s after the environments are created) use: + +```sh +tox -e mypy && tox -e coverage ``` ### Reporting a bug @@ -50,8 +56,10 @@ Anyone who uses this library is welcome to [submit a pull request](https://githu 1. Public functions/methods have docstrings and type annotations. 2. New functionality is accompanied by clear, descriptive unit tests. -3. You can run `make test && make docs` successfully. -4. You have [signed your commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification). +3. The library maintains 100% test coverage. +4. You can run `make test && make docs` successfully. +5. No backwards-incompatible changes (unless discussed in an existing issue). +6. You have [signed your commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification). If you want to discuss an idea you're working on but haven't yet finished all of the above, please [open a draft pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests). That will be a clear signal that you're not asking to merge your code (yet) and are just looking for discussion or feedback. diff --git a/docs/source/_substitutions.rst b/docs/source/_substitutions.rst index b06a1ee3..2ce22d36 100644 --- a/docs/source/_substitutions.rst +++ b/docs/source/_substitutions.rst @@ -1,6 +1,9 @@ -.. |arg_base_id| replace:: An Airtable base id. +.. |arg_base_id| replace:: An Airtable base ID. -.. |arg_record_id| replace:: An Airtable record id. +.. |arg_record_id| replace:: An Airtable record ID. + +.. |arg_table_id_or_name| replace:: An Airtable table ID or name. 
+ Table name should be unencoded, as shown in the browser. .. |kwarg_view| replace:: The name or ID of a view. If set, only the records in that view will be returned. @@ -32,8 +35,7 @@ .. |kwarg_formula| replace:: An Airtable formula. The formula will be evaluated for each record, and if the result is none of ``0``, ``false``, ``""``, ``NaN``, ``[]``, or ``#Error!`` the record will be included in the response. If combined with view, only records in that view which satisfy the - formula will be returned. For example, to only include records where - ``COLUMN_A`` isn't empty, pass in ``formula="{COLUMN_A}"``. + formula will be returned. Read more at :doc:`formulas`. .. |kwarg_typecast| replace:: The Airtable API will perform best-effort automatic data conversion from string values. @@ -46,17 +48,52 @@ .. |kwarg_user_locale| replace:: The user locale that should be used to format dates when using `string` as the `cell_format`. See - https://support.airtable.com/hc/en-us/articles/220340268-Supported-locale-modifiers-for-SET-LOCALE + `Supported SET_LOCALE modifiers `__ for valid values. .. |kwarg_time_zone| replace:: The time zone that should be used to format dates when using `string` as the `cell_format`. See - https://support.airtable.com/hc/en-us/articles/216141558-Supported-timezones-for-SET-TIMEZONE + `Supported SET_TIMEZONE timezones `__ for valid values. .. |kwarg_replace| replace:: If ``True``, record is replaced in its entirety by provided fields; if a field is not included its value will - bet set to null. If False, only provided fields are updated. + be set to null. If ``False``, only provided fields are updated. + +.. |kwarg_use_field_ids| replace:: An optional boolean value that lets you return field objects where the + key is the field ID. This defaults to ``False``, which returns field objects where the key is the field name. + This behavior can be overridden by passing ``use_field_ids=True`` to :class:`~pyairtable.Api`. + +.. 
|kwarg_count_comments| replace:: If ``True``, the API will include a ``commentCount`` + field for each record. This allows you to see which records have comments without fetching + each record individually. Defaults to ``False``. + +.. |kwarg_force_metadata| replace:: + By default, this method will only fetch information from the API if it has not been cached. + If called with ``force=True`` it will always call the API, and will overwrite any cached values. + +.. |kwarg_validate_metadata| replace:: + If ``False``, will create an object without validating the ID/name provided. + If ``True``, will fetch information from the metadata API and validate the ID/name exists, + raising ``KeyError`` if it does not. + +.. |kwarg_orm_fetch| replace:: + If ``True``, records will be fetched and field values will be + updated. If ``False``, new instances are created with the provided IDs, + but field values are unset. + +.. |kwarg_orm_memoize| replace:: + If ``True``, any objects created will be memoized for future reuse. + If ``False``, objects created will *not* be memoized. + The default behavior is defined on the :class:`~pyairtable.orm.Model` subclass. + +.. |kwarg_orm_lazy| replace:: + If ``True``, this field will return empty objects with only IDs; + call :meth:`~pyairtable.orm.Model.fetch` to retrieve values. + +.. |kwarg_permission_level| replace:: + See `application permission levels `__. + +.. |warn| unicode:: U+26A0 .. WARNING SIGN -.. |kwarg_return_fields_by_field_id| replace:: An optional boolean value that lets you return field objects where the - key is the field id. This defaults to `false`, which returns field objects where the key is the field name. +.. |enterprise_only| replace:: |warn| This feature is only available on Enterprise billing plans. 
diff --git a/docs/source/api.rst b/docs/source/api.rst index 480d51b6..7bc3caf0 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -6,7 +6,7 @@ API Reference ============= -Module: pyairtable +API: pyairtable ******************************* .. autoclass:: pyairtable.Api @@ -18,48 +18,106 @@ Module: pyairtable .. autoclass:: pyairtable.Table :members: +.. autoclass:: pyairtable.Workspace + :members: + +.. autoclass:: pyairtable.Enterprise + :members: + .. autofunction:: pyairtable.retry_strategy -Module: pyairtable.api.types +API: pyairtable.api.enterprise +******************************* + +.. automodule:: pyairtable.api.enterprise + :members: + :exclude-members: Enterprise + :inherited-members: BaseModel, AirtableModel + + +API: pyairtable.api.types ******************************* .. automodule:: pyairtable.api.types :members: -Module: pyairtable.formulas +API: pyairtable.exceptions +******************************* + +.. automodule:: pyairtable.exceptions + :members: + + +API: pyairtable.formulas ******************************* .. automodule:: pyairtable.formulas :members: -Module: pyairtable.models +API: pyairtable.models ******************************* .. automodule:: pyairtable.models :members: + :inherited-members: BaseModel, AirtableModel + +API: pyairtable.models.comment +------------------------------- + +.. automodule:: pyairtable.models.comment + :members: + :exclude-members: Comment + :inherited-members: BaseModel, AirtableModel -Module: pyairtable.orm + +API: pyairtable.models.schema +------------------------------- + +.. automodule:: pyairtable.models.schema + :members: + :inherited-members: BaseModel, AirtableModel + + +API: pyairtable.models.webhook +------------------------------- + +.. automodule:: pyairtable.models.webhook + :members: + :exclude-members: Webhook, WebhookNotification, WebhookPayload + :inherited-members: BaseModel, AirtableModel + + +API: pyairtable.orm ******************************* .. 
autoclass:: pyairtable.orm.Model :members: +.. autoclass:: pyairtable.orm.SaveResult + :members: + -Module: pyairtable.orm.fields +API: pyairtable.orm.fields ******************************* .. automodule:: pyairtable.orm.fields :members: - :member-order: bysource :exclude-members: valid_types, contains_type :no-inherited-members: -Module: pyairtable.utils +API: pyairtable.testing +******************************* + +.. automodule:: pyairtable.testing + :members: + + +API: pyairtable.utils ******************************* .. automodule:: pyairtable.utils diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 8283c8b8..71e707d6 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -2,13 +2,223 @@ Changelog ========= +3.3.0 (2025-11-05) +------------------------ + +* Added ``count_comments=`` parameter to ``Table.all`` and ``Table.first``. + - `PR #441 `_ +* Added support for `Create Workspace `_ + via :meth:`Enterprise.create_workspace `. + - `PR #442 `_ +* Added support for Python 3.14 and dropped support for Python 3.9. + - `PR #443 `_ +* Added pyairtable.models.schema.FieldType enum. + - `PR #444 `_ + +3.2.0 (2025-08-17) +------------------------ + +* Added several new fields returned in metadata models. + - `PR #434 `_ + +3.1.1 (2025-04-07) +------------------------ + +* Fix a bug affecting :meth:`~pyairtable.orm.Model.from_id` when ``use_field_ids=True``. + - `PR #431 `_ + +3.1.0 (2025-04-07) +------------------------ + +* Added ``Field.field_schema()`` to type-annotated ORM fields. + - `PR #426 `_ +* Fix for incorrect type annotations on :class:`~pyairtable.formulas.FunctionCall`. + - `PR #429 `_ + +3.0.2 (2025-02-25) +------------------------ + +* Fixed broken search feature in the library docs. + - `PR #423 `_ +* Fix for `#421 `_ + which prevented ORM link fields from fetching records of models + that used field IDs instead of field names. 
+ - `PR #422 `_ + +3.0.1 (2024-12-06) +------------------------ + +* Fix for `#415 `_ + which caused an endless loop when making a request via `POST /listRecords`. + - `PR #416 `_, `PR #417 `_ + +2.3.7 (2024-12-06) +------------------------ + +* Fix for `#415 `_ (see above). + +3.0 (2024-11-15) +------------------------ + +* Added support for `new enterprise API endpoints `__. + - `PR #407 `_ +* Refactored methods/properties for constructing URLs in the API. + - `PR #399 `_ +* Dropped support for Pydantic 1.x. + - `PR #397 `_ +* Dropped support for Python 3.8. + - `PR #395 `_ +* Added support for `Upload attachment `_ + via :meth:`Table.upload_attachment ` + or :meth:`AttachmentsList.upload `. + - `PR #389 `_ +* Added :class:`pyairtable.testing.MockAirtable` for easier testing. + - `PR #388 `_ +* Changed the return type of :meth:`Model.save ` + from ``bool`` to :class:`~pyairtable.orm.SaveResult`. + - `PR #387 `_ +* Added ``use_field_ids=`` parameter to :class:`~pyairtable.Api`. + - `PR #386 `_ +* Changed the behavior of :meth:`Model.save ` + to no longer send unmodified field values to the API. + - `PR #381 `_ +* Added command line utility and ORM module generator. See :doc:`cli`. + - `PR #376 `_ +* Added `Enterprise.grant_access ` + and `Enterprise.revoke_access `. + - `PR #373 `_ +* Added support for :ref:`memoization of ORM models `. + - `PR #369 `_ +* Refactored methods for accessing ORM model configuration. + - `PR #366 `_ +* Added ORM fields that :ref:`require a non-null value `. + - `PR #363 `_ +* Renamed ``return_fields_by_field_id=`` to ``use_field_ids=``. + - `PR #362 `_ +* Removed the ``pyairtable.metadata`` module. + - `PR #360 `_ +* Support ``use_field_ids`` in the :ref:`ORM`. + - `PR #355 `_ +* Added ORM field type :class:`~pyairtable.orm.fields.SingleLinkField` + for record links that should only contain one record. 
+ - `PR #354 `_ +* Changed the type of :data:`~pyairtable.orm.Model.created_time` + from ``str`` to ``datetime``, along with all other timestamp fields + used in :ref:`API: pyairtable.models`. + - `PR #352 `_ +* :class:`~pyairtable.orm.fields.TextField` and + :class:`~pyairtable.orm.fields.CheckboxField` return ``""`` + or ``False`` instead of ``None``. + - `PR #347 `_ +* Rewrite of :mod:`pyairtable.formulas` module. See :ref:`Building Formulas`. + - `PR #329 `_ + +2.3.6 (2024-11-11) +------------------------ + +* Fix for `#404 `_ + related to `enterprise endpoint changes `__. + - `PR #405 `_, `PR #406 `_ + +2.3.5 (2024-10-29) +------------------------ + +* Fix for environment variables not getting passed to the ``requests`` + library (`#398 `_). + - `PR #401 `_ + +2.3.4 (2024-10-21) +------------------------ + +* Fixed a crash at import time under Python 3.13. + - `PR #396 `_ + +2.3.3 (2024-03-22) +------------------------ + +* Fixed a bug affecting ORM Meta values which are computed at runtime. + - `PR #357 `_ +* Fixed documentation for the ORM module. + - `PR #356 `_ + +2.3.2 (2024-03-18) +------------------------ + +* Fixed a bug affecting :func:`pyairtable.metadata.get_table_schema`. + - `PR #349 `_ + +2.3.1 (2024-03-14) +------------------------ + +* Fixed a bug affecting how timezones are parsed by :class:`~pyairtable.orm.fields.DatetimeField`. + - `PR #342 `_ +* Fixed a bug affecting :meth:`~pyairtable.Base.create_table`. + - `PR #345 `_ + +2.3.0 (2024-02-25) +------------------------ + +* A breaking API change was accidentally introduced. + Read more in :ref:`Migrating from 2.2 to 2.3`. +* Added support for :ref:`managing permissions and shares` + and :ref:`managing users`. + - `PR #337 `_ +* Added :meth:`Enterprise.audit_log ` + to iterate page-by-page through `audit log events `__. 
+ - `PR #330 `_ +* :meth:`Api.base `, + :meth:`Api.table `, + and :meth:`Base.table ` + will use cached base metadata when called multiple times with ``validate=True``, + unless the caller passes a new keyword argument ``force=True``. + This allows callers to validate the IDs/names of many bases or tables at once + without having to perform expensive network overhead each time. + - `PR #336 `_ + +2.2.2 (2024-01-28) +------------------------ + +* Enterprise methods :meth:`~pyairtable.Enterprise.user`, + :meth:`~pyairtable.Enterprise.users`, and :meth:`~pyairtable.Enterprise.group` + now return collaborations by default. + - `PR #332 `_ +* Added more helper functions for formulas: + :func:`~pyairtable.formulas.LESS`, + :func:`~pyairtable.formulas.LESS_EQUAL`, + :func:`~pyairtable.formulas.GREATER`, + :func:`~pyairtable.formulas.GREATER_EQUAL`, + and + :func:`~pyairtable.formulas.NOT_EQUAL`. + - `PR #323 `_ + +2.2.1 (2023-11-28) +------------------------ + +* :meth:`~pyairtable.Table.update` now accepts ``return_fields_by_field_id=True`` + - `PR #320 `_ + +2.2.0 (2023-11-13) +------------------------ + +* Fixed a bug in how webhook notification signatures are validated + - `PR #312 `_ +* Added support for reading and modifying :doc:`metadata` + - `PR #311 `_ +* Added support for the 'AI Text' field type + - `PR #310 `_ +* Batch methods can now accept generators or iterators, not just lists + - `PR #308 `_ +* Fixed a few documentation errors - + `PR #301 `_, + `PR #306 `_ + 2.1.0 (2023-08-18) ------------------------ * Added classes and methods for managing :ref:`webhooks`. - - `PR #291 `_. + - `PR #291 `_ * Added compatibility with Pydantic 2.0. - - `PR #288 `_. + - `PR #288 `_ 2.0.0 (2023-07-31) ------------------------ @@ -16,55 +226,55 @@ Changelog See :ref:`Migrating from 1.x to 2.0` for detailed migration notes. * Added :class:`~pyairtable.models.Comment` class; see :ref:`Commenting on Records`. - - `PR #282 `_. 
+ - `PR #282 `_ * :meth:`~pyairtable.Table.batch_upsert` now returns the full payload from the Airtable API. - - `PR #281 `_. + - `PR #281 `_ * :ref:`ORM` module is no longer experimental and has a stable API. - - `PR #277 `_. + - `PR #277 `_ * Added :meth:`Model.batch_save ` and :meth:`Model.batch_delete `. - - `PR #274 `_. + - `PR #274 `_ * Added :meth:`Api.whoami ` method. - - `PR #273 `_. + - `PR #273 `_ * pyAirtable will automatically retry requests when throttled by Airtable's QPS. - - `PR #272 `_. + - `PR #272 `_ * ORM Meta attributes can now be defined as callables. - - `PR #268 `_. + - `PR #268 `_ * Removed ``ApiAbstract``. - - `PR #267 `_. + - `PR #267 `_ * Implemented strict type annotations on all functions and methods. - - `PR #263 `_. + - `PR #263 `_ * Return Model instances, not dicts, from :meth:`Model.all ` and :meth:`Model.first `. - - `PR #262 `_. + - `PR #262 `_ * Dropped support for Python 3.7. - - `PR #261 `_. + - `PR #261 `_ * :ref:`ORM` supports all Airtable field types. - - `PR #260 `_. + - `PR #260 `_ 1.5.0 (2023-05-15) ------------------------- * Add support for Airtable's upsert operation (see :ref:`Updating Records`). - - `PR #255 `_. + - `PR #255 `_ * Fix ``return_fields_by_field_id`` in :meth:`~pyairtable.Api.batch_create` and :meth:`~pyairtable.Api.batch_update`. - - `PR #252 `_. + - `PR #252 `_ * Fix ORM crash when Airtable returned additional fields. - - `PR #250 `_. + - `PR #250 `_ * Use POST for URLs that are longer than the 16k character limit set by the Airtable API. - - `PR #247 `_. + - `PR #247 `_ * Added ``endpoint_url=`` param to :class:`~pyairtable.Table`, :class:`~pyairtable.Base`, :class:`~pyairtable.Api`. - - `PR #243 `_. + - `PR #243 `_ * Added ORM :class:`~pyairtable.orm.fields.LookupField`. - - `PR #182 `_. + - `PR #182 `_ * Dropped support for Python 3.6 (reached end of life 2021-12-23) - - `PR #213 `_. + - `PR #213 `_ 1.4.0 (2022-12-14) ------------------------- * Added :func:`pyairtable.retry_strategy`. 
-* Misc fix in sleep for batch requests `PR #180 `_. +* Misc fix in sleep for batch requests `PR #180 `_ 1.3.0 (2022-08-23) ------------------------- @@ -76,7 +286,7 @@ See :ref:`Migrating from 1.x to 2.0` for detailed migration notes. 1.2.0 (2022-07-09) ------------------------- -* Fixed missing rate limit in :meth:`~pyairtable.Api.batch_update` - `PR #162 `_. +* Fixed missing rate limit in :meth:`~pyairtable.Api.batch_update` - `PR #162 `_ * Added support for new parameter `return_fields_by_field_id` - `PR #161 `_. See updated :ref:`Parameters`. * Added new ``OR`` formula - `PR #148 `_. See :mod:`pyairtable.formulas`. diff --git a/docs/source/cli.rst b/docs/source/cli.rst new file mode 100644 index 00000000..f2f5a60c --- /dev/null +++ b/docs/source/cli.rst @@ -0,0 +1,278 @@ +Using the Command Line +======================= + +pyAirtable ships with a rudimentary command line interface for interacting with Airtable. +This does not have full support for all Airtable API endpoints, but it does provide a way +to interact with the most common use cases. It will usually output JSON. + +CLI Quickstart +-------------- + +.. code-block:: shell + + % pip install 'pyairtable[cli]' + % read -s AIRTABLE_API_KEY + ... + % export AIRTABLE_API_KEY + % pyairtable whoami + {"id": "usrXXXXXXXXXXXX", "email": "you@example.com"} + % pyairtable base YOUR_BASE_ID table YOUR_TABLE_NAME records + [{"id": "recXXXXXXXXXXXX", "fields": {...}}, ...] + +Authentication +-------------- + +There are a few ways to pass your authentication information to the CLI: + +1. Put your API key into the ``AIRTABLE_API_KEY`` environment variable. + If you need to use a different variable name, you can pass the + appropriate variable name with the ``-ke/--key-env`` option. +2. Put your API key into a file, and put the full path to the file + into the ``AIRTABLE_API_KEY_FILE`` environment variable. 
+ If you need to use a different variable name, you can pass the + appropriate variable name with the ``-kf/--key-file`` option. +3. Pass the API key as an argument to the CLI. This is not recommended + as it could be visible to other processes or stored in your shell history. + If you must do it, use the ``-k/--key`` option. + +Shortcuts +--------- + +If you pass a partial command to the CLI, it will try to match it to a full command. +This only works if there is a single unambiguous completion for the partial command. +For example, ``pyairtable e`` will be interpreted as ``pyairtable enterprise``, +but ``pyairtable b`` is ambiguous, as it could mean ``base`` or ``bases``. + +Command list +------------ + +.. [[[cog + from contextlib import redirect_stdout + from io import StringIO + from pyairtable.cli import cli, CLI_COMMANDS + import textwrap + + for cmd in ["", *CLI_COMMANDS]: + with redirect_stdout(StringIO()) as help_output: + cli( + ["-k", "fake", *cmd.split(), "--help"], + prog_name="pyairtable", + standalone_mode=False + ) + if cmd: + heading = " ".join(w for w in cmd.split() if w == w.lower()) + cog.outl() + cog.outl(heading) + cog.outl("~" * len(heading)) + cog.outl() + cog.outl(".. code-block:: text") + cog.outl() + cog.outl(textwrap.indent(help_output.getvalue(), " " * 4)) + ]]] + +.. code-block:: text + + Usage: pyairtable [OPTIONS] COMMAND [ARGS]... + + Options: + -k, --key TEXT Your API key. + -kf, --key-file PATH File containing your API key. + -ke, --key-env VAR Env var containing your API key. + -v, --verbose Print verbose output. + --help Show this message and exit. + + Commands: + whoami Print the current user's information. + bases List all available bases. + base ID schema Print the base schema. + base ID table ID_OR_NAME records Retrieve records from the table. + base ID table ID_OR_NAME schema Print the table's schema as JSON. + base ID collaborators Print base collaborators. + base ID shares Print base shares. 
+ base ID orm Generate a Python ORM module. + enterprise ID info Print information about an enterprise. + enterprise ID user ID_OR_EMAIL Print one user's information. + enterprise ID users ID_OR_EMAIL... Print many users, keyed by user ID. + enterprise ID group ID Print a user group's information. + enterprise ID groups ID... Print many groups, keyed by group ID. + + +whoami +~~~~~~ + +.. code-block:: text + + Usage: pyairtable whoami [OPTIONS] + + Print the current user's information. + + Options: + --help Show this message and exit. + + +bases +~~~~~ + +.. code-block:: text + + Usage: pyairtable bases [OPTIONS] + + List all available bases. + + Options: + --help Show this message and exit. + + +base schema +~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID schema [OPTIONS] + + Print the base schema. + + Options: + --help Show this message and exit. + + +base table records +~~~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID table ID_OR_NAME records [OPTIONS] + + Retrieve records from the table. + + Options: + -f, --formula TEXT Filter records with a formula. + -v, --view TEXT Filter records by a view. + -n, --limit INTEGER Limit the number of records returned. + -S, --sort TEXT Sort records by field(s). + -F, --field TEXT Limit output to certain field(s). + --help Show this message and exit. + + +base table schema +~~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID table ID_OR_NAME schema [OPTIONS] + + Print the table's schema as JSON. + + Options: + --help Show this message and exit. + + +base collaborators +~~~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID collaborators [OPTIONS] + + Print base collaborators. + + Options: + --help Show this message and exit. + + +base shares +~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID shares [OPTIONS] + + Print base shares. + + Options: + --help Show this message and exit. 
+ + +base orm +~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable base BASE_ID orm [OPTIONS] + + Generate a Python ORM module. + + Options: + -t, --table NAME_OR_ID Only generate specific table(s). + --help Show this message and exit. + + +enterprise info +~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable enterprise ENTERPRISE_ID info [OPTIONS] + + Print information about an enterprise. + + Options: + --help Show this message and exit. + + +enterprise user +~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable enterprise ENTERPRISE_ID user [OPTIONS] ID_OR_EMAIL + + Print one user's information. + + Options: + --help Show this message and exit. + + +enterprise users +~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable enterprise ENTERPRISE_ID users [OPTIONS] ID_OR_EMAIL... + + Print many users, keyed by user ID. + + Options: + -c, --collaborations Include collaborations. + -a, --all Retrieve all users. + --help Show this message and exit. + + +enterprise group +~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable enterprise ENTERPRISE_ID group [OPTIONS] GROUP_ID + + Print a user group's information. + + Options: + --help Show this message and exit. + + +enterprise groups +~~~~~~~~~~~~~~~~~ + +.. code-block:: text + + Usage: pyairtable enterprise ENTERPRISE_ID groups [OPTIONS] GROUP_ID... + + Print many groups, keyed by group ID. + + Options: + -a, --all Retrieve all groups. + -c, --collaborations Include collaborations. + --help Show this message and exit. + +.. 
[[[end]]] (sum: kYHTqKvqGy) diff --git a/docs/source/conf.py b/docs/source/conf.py index 380add5f..015095dd 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -47,8 +47,10 @@ autodoc_member_order = "bysource" autoclass_content = "class" -# See https://autodoc-pydantic.readthedocs.io/en/stable/users/configuration.html +# See https://autodoc-pydantic.readthedocs.io/en/v1.9.0/users/configuration.html autodoc_pydantic_field_show_alias = False +autodoc_pydantic_field_show_default = False +autodoc_pydantic_field_show_required = False autodoc_pydantic_model_member_order = "bysource" autodoc_pydantic_model_show_config_summary = False autodoc_pydantic_model_show_field_summary = False diff --git a/docs/source/enterprise.rst b/docs/source/enterprise.rst new file mode 100644 index 00000000..5ace9b70 --- /dev/null +++ b/docs/source/enterprise.rst @@ -0,0 +1,195 @@ +.. include:: _substitutions.rst +.. include:: _warn_latest.rst + + +Enterprise Features +============================== + + +Retrieving information +---------------------- + +pyAirtable exposes a number of classes and methods for interacting with enterprise organizations. +The following methods are only available on an `Enterprise plan `__. +If you call one of them against a base that is not part of an enterprise workspace, Airtable will +return a 404 error, and pyAirtable will add a reminder to the exception to check your billing plan. + +.. automethod:: pyairtable.Api.enterprise + :noindex: + +.. automethod:: pyairtable.Base.collaborators + :noindex: + +.. automethod:: pyairtable.Base.shares + :noindex: + +.. automethod:: pyairtable.Workspace.collaborators + :noindex: + +.. automethod:: pyairtable.Enterprise.info + :noindex: + + +Retrieving audit logs +--------------------- + +.. 
automethod:: pyairtable.Enterprise.audit_log + :noindex: + + +Managing permissions and shares +------------------------------- + +You can use pyAirtable to change permissions on a base or workspace +via the following methods exposed on schema objects. + +If for some reason you need to call these API endpoints without first retrieving +schema information, you might consider calling :meth:`~pyairtable.Api.request` directly. + +`Add base collaborator `__ + + >>> base.collaborators().add_user("usrUserId", "read") + >>> base.collaborators().add_group("ugpGroupId", "edit") + >>> base.collaborators().add("user", "usrUserId", "comment") + +`Add interface collaborator `__ + + >>> base.collaborators().interfaces[pbd].add_user("usrUserId", "read") + >>> base.collaborators().interfaces[pbd].add_group("ugpGroupId", "read") + >>> base.collaborators().interfaces[pbd].add("user", "usrUserId", "read") + +`Add workspace collaborator `__ + + >>> workspace.collaborators().add_user("usrUserId", "read") + >>> workspace.collaborators().add_group("ugpGroupId", "edit") + >>> workspace.collaborators().add("user", "usrUserId", "comment") + +`Update collaborator base permission `__ + + >>> base.collaborators().update("usrUserId", "edit") + >>> base.collaborators().update("ugpGroupId", "edit") + +`Update interface collaborator `__ + + >>> base.collaborators().interfaces[pbd].update("usrUserId", "edit") + >>> base.collaborators().interfaces[pbd].update("ugpGroupId", "edit") + +`Update workspace collaborator `__ + + >>> workspace.collaborators().update("usrUserId", "edit") + >>> workspace.collaborators().update("ugpGroupId", "edit") + +`Delete base collaborator `__ + + >>> base.collaborators().remove("usrUserId") + >>> base.collaborators().remove("ugpGroupId") + +`Delete interface collaborator `__ + + >>> base.collaborators().interfaces[pbd].remove("usrUserId") + >>> base.collaborators().interfaces[pbd].remove("ugpGroupId") + +`Delete workspace collaborator `__ + + >>> 
workspace.collaborators().remove("usrUserId") + >>> workspace.collaborators().remove("ugpGroupId") + +`Delete base invite `__ + + >>> base.collaborators().invite_links.via_base[0].delete() + >>> workspace.collaborators().invite_links.via_base[0].delete() + +`Delete interface invite `__ + + >>> base.collaborators().interfaces[pbd].invite_links[0].delete() + +`Delete workspace invite `__ + + >>> base.collaborators().invite_links.via_workspace[0].delete() + >>> workspace.collaborators().invite_links.via_workspace[0].delete() + +`Manage share `__ + + >>> share = base.shares()[0] + >>> share.disable() + >>> share.enable() + +`Delete share `__ + + >>> share.delete() + +`Update workspace restrictions `__ + + >>> r = workspace.collaborators().restrictions + >>> r.invite_creation = "unrestricted" + >>> r.share_creation = "onlyOwners" + >>> r.save() + + +Managing users +------------------- + +You can use pyAirtable to manage an enterprise's users +via the following methods. + +`Manage user `__ + + >>> user = enterprise.user("usrUserId") + >>> user.state = "deactivated" + >>> user.email = user.email.replace("@", "+deactivated@") + >>> user.save() + +`Logout user `__ + + >>> user.logout() + +`Delete user by id `__ + + >>> user.delete() + +`Remove user from enterprise `__ + + >>> enterprise.remove_user("usrUserId", replacement="usrOtherUserId") + +`Manage user membership `__ + + >>> enterprise.claim_users({"userId": "managed"}) + +`Delete users by email `__ + + >>> enterprise.delete_users(["foo@example.com", "bar@example.com"]) + +`Grant admin access `__ + + >>> enterprise.grant_admin("usrUserId") + >>> enterprise.grant_admin("user@example.com") + >>> enterprise.grant_admin(enterprise.user("usrUserId")) + +`Revoke admin access `__ + + >>> enterprise.revoke_admin("usrUserId") + >>> enterprise.revoke_admin("user@example.com") + >>> enterprise.revoke_admin(enterprise.user("usrUserId")) + + +Managing workspaces and organizations +-------------------------------------- + +You can 
use pyAirtable to manage workspaces, user groups, and descendant enterprises +via the following methods. + +`Create workspace `__ + + >>> workspace_id = enterprise.create_workspace("My New Workspace") + +`Move workspaces `__ + + >>> enterprise.move_workspaces(["wspId1", "wspId2"], "entTargetEnterpriseId") + +`Move user groups `__ + + >>> enterprise.move_groups(["ugpId1", "ugpId2"], "entTargetEnterpriseId") + +`Create descendant enterprise `__ + + >>> descendant = enterprise.create_descendant("Descendant Organization Name") diff --git a/docs/source/formulas.rst b/docs/source/formulas.rst new file mode 100644 index 00000000..cf1a7a5d --- /dev/null +++ b/docs/source/formulas.rst @@ -0,0 +1,95 @@ +Building Formulas +================= + +pyAirtable lets you construct formulas at runtime using Python syntax, +and will convert those formula objects into the appropriate strings when +sending them to the Airtable API. + + +Basics +-------------------------- + +In cases where you want to find records with fields matching a computed value, +this library provides the :func:`~pyairtable.formulas.match` function, which +returns a formula that can be passed to methods like :func:`Table.all <pyairtable.Table.all>`: + +.. autofunction:: pyairtable.formulas.match + :noindex: + + +Compound conditions +-------------------------- + +Formulas and conditions can be chained together if you need to create +more complex criteria: + + >>> from datetime import date + >>> from pyairtable.formulas import AND, GTE, Field, match + >>> formula = AND( + ... match({"Customer": "Alice"}), + ... GTE(Field("Delivery Date"), date.today()) + ... ) + >>> formula + AND(EQ(Field('Customer'), 'Alice'), + GTE(Field('Delivery Date'), datetime.date(2023, 12, 10))) + >>> str(formula) + "AND({Customer}='Alice', {Delivery Date}>=DATETIME_PARSE('2023-12-10'))" + +pyAirtable has support for the following comparisons: + + .. 
list-table:: + + * - :class:`pyairtable.formulas.EQ` + - ``lval = rval`` + * - :class:`pyairtable.formulas.NE` + - ``lval != rval`` + * - :class:`pyairtable.formulas.GT` + - ``lval > rval`` + * - :class:`pyairtable.formulas.GTE` + - ``lval >= rval`` + * - :class:`pyairtable.formulas.LT` + - ``lval < rval`` + * - :class:`pyairtable.formulas.LTE` + - ``lval <= rval`` + +These are also implemented as convenience methods on all instances +of :class:`~pyairtable.formulas.Formula`, so that the following are equivalent: + + >>> EQ(Field("Customer"), "Alice") + >>> match({"Customer": "Alice"}) + >>> Field("Customer").eq("Alice") + +pyAirtable exports ``AND``, ``OR``, ``NOT``, and ``XOR`` for chaining conditions. +You can also use Python operators to modify and combine formulas: + + >>> from pyairtable.formulas import match + >>> match({"Customer": "Bob"}) & ~match({"Product": "TEST"}) + AND(EQ(Field('Customer'), 'Bob'), + NOT(EQ(Field('Product'), 'TEST'))) + + .. list-table:: + :header-rows: 1 + + * - Python operator + - `Airtable expression `__ + * - ``lval & rval`` + - ``AND(lval, rval)`` + * - ``lval | rval`` + - ``OR(lval, rval)`` + * - ``~rval`` + - ``NOT(rval)`` + * - ``lval ^ rval`` + - ``XOR(lval, rval)`` + +Calling functions +-------------------------- + +pyAirtable also exports functions that act as placeholders for calling +Airtable formula functions: + + >>> from pyairtable.formulas import Field, GTE, DATETIME_DIFF, TODAY + >>> formula = GTE(DATETIME_DIFF(TODAY(), Field("Purchase Date"), "days"), 7) + >>> str(formula) + "DATETIME_DIFF(TODAY(), {Purchase Date}, 'days')>=7" + +All supported functions are listed in the :mod:`pyairtable.formulas` API reference. 
diff --git a/docs/source/getting-started.rst b/docs/source/getting-started.rst index b94140c7..87f24c8b 100644 --- a/docs/source/getting-started.rst +++ b/docs/source/getting-started.rst @@ -53,7 +53,7 @@ records in Airtable: "createdTime": "2017-03-14T22:04:31.000Z", "fields": { "Name": "Alice", - "Exail": "alice@example.com" + "Email": "alice@example.com" } } ] diff --git a/docs/source/index.rst b/docs/source/index.rst index 32e09649..2e62d0ba 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -26,10 +26,12 @@ pyAirtable getting-started tables + formulas orm - webhooks metadata - migrations + webhooks + enterprise + cli api @@ -38,6 +40,7 @@ pyAirtable :hidden: about + migrations changelog contributing GitHub diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 1b8a8c5a..9b501cd7 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -5,11 +5,97 @@ Metadata ============== -The metadata API gives you the ability to list all of your bases, tables, fields, and views. +The Airtable API gives you the ability to list all of your bases, tables, fields, and views. +pyAirtable allows you to inspect and interact with this metadata in your bases. -.. warning:: - This API is experimental and subject to change. +There may be parts of the Airtable API which are not supported below; +you can always use :meth:`Api.request ` to call them directly. -.. automodule:: pyairtable.metadata - :members: +Reading schemas +----------------------------- + +All of the methods below return complex nested data structures, some of which +have their own convenience methods for searching their contents, such as +:meth:`TableSchema.field() `. +You'll find more detail in the API reference for :mod:`pyairtable.models.schema`. + +.. automethod:: pyairtable.Api.bases + :noindex: + +.. automethod:: pyairtable.Base.schema + :noindex: + +.. automethod:: pyairtable.Base.tables + :noindex: + +.. 
automethod:: pyairtable.Table.schema + :noindex: + + +Modifying existing schema +----------------------------- + +To modify a table or field, you can modify portions of its schema object directly +and call ``save()``, as shown below. The Airtable API only allows changing certain +properties; these are enumerated in the API reference for each schema class. +For example, :class:`~pyairtable.models.schema.TableSchema` allows changing the name, +description, and date dependency configuration. + +.. code-block:: python + + >>> schema = table.schema() + >>> schema.name = "Renamed" + >>> schema.save() + >>> field = schema.field("Name") + >>> field.name = "Label" + >>> field.description = "The primary field on the table" + >>> field.save() + +To add or replace the date dependency configuration on a table, you can use the shortcut method +:meth:`TableSchema.set_date_dependency `. + + +Creating schema elements +----------------------------- + +The following methods allow creating bases, tables, or fields: + +.. automethod:: pyairtable.Api.create_base + :noindex: + +.. automethod:: pyairtable.Workspace.create_base + :noindex: + +.. automethod:: pyairtable.Workspace.move_base + :noindex: + +.. automethod:: pyairtable.Base.create_table + :noindex: + +.. automethod:: pyairtable.Table.create_field + :noindex: + + +Deleting schema elements +----------------------------- + +|enterprise_only| + +The Airtable API does not allow deleting tables or fields, but it does allow +deleting workspaces, bases, and views. 
pyAirtable supports the following methods: + +To delete a :class:`~pyairtable.Workspace`: + + >>> ws = api.workspace("wspmhESAta6clCCwF") + >>> ws.delete() + +To delete a :class:`~pyairtable.Base`: + + >>> base = api.base("appMxESAta6clCCwF") + >>> base.delete() + +To delete a view, first retrieve its :class:`~pyairtable.models.schema.ViewSchema`: + + >>> vw = table.schema().view("View Name") + >>> vw.delete() diff --git a/docs/source/migrations.rst b/docs/source/migrations.rst index 5891c8c6..c6fc6a8c 100644 --- a/docs/source/migrations.rst +++ b/docs/source/migrations.rst @@ -6,6 +6,190 @@ Migration Guide ***************** +Migrating from 2.x to 3.0 +============================ + +The 3.0 release introduces a number of breaking changes, summarized below. + +Updated minimum dependencies +--------------------------------------------- + +* pyAirtable 3.0 is tested on Python 3.9 or higher. It may continue to work on Python 3.8 + for some time, but bug reports related to Python 3.8 compatibility will not be accepted. +* pyAirtable 3.0 requires Pydantic 2. If your project still uses Pydantic 1, + you will need to continue to use pyAirtable 2.x until you can upgrade Pydantic. + Read the `Pydantic v2 migration guide `__ + for more information. + +Deprecated metadata module removed +--------------------------------------------- + +The 3.0 release removed the ``pyairtable.metadata`` module. For supported alternatives, +see :doc:`metadata`. + +Changes to generating URLs +--------------------------------------------- + +The following properties and methods for constructing URLs have been renamed or removed. +These methods now return instances of :class:`~pyairtable.utils.Url`, which is a +subclass of ``str`` that has some overloaded operators. See docs for more details. + +.. 
list-table:: + :header-rows: 1 + + * - Building a URL in 2.x + - Building a URL in 3.0 + * - ``table.url`` + - ``table.urls.records`` + * - ``table.record_url(record_id)`` + - ``table.urls.record(record_id)`` + * - ``table.meta_url("one", "two")`` + - ``table.urls.meta / "one" / "two"`` + * - ``table.meta_url(*parts)`` + - ``table.urls.meta // parts`` + * - ``base.url`` + - (removed; was invalid) + * - ``base.meta_url("one", "two")`` + - ``base.urls.meta / "one" / "two"`` + * - ``base.webhooks_url()`` + - ``base.urls.webhooks`` + * - ``enterprise.url`` + - ``enterprise.urls.meta`` + * - ``workspace.url`` + - ``workspace.urls.meta`` + +Changes to the formulas module +--------------------------------------------- + +Most functions and methods in :mod:`pyairtable.formulas` now return instances of +:class:`~pyairtable.formulas.Formula`, which can be chained, combined, and eventually +passed to the ``formula=`` keyword argument to methods like :meth:`~pyairtable.Table.all`. +Read the module documentation for more details. + +The full list of breaking changes is below: + +.. list-table:: + :header-rows: 1 + + * - Function + - Changes + * - :func:`~pyairtable.formulas.match` + - This now raises ``ValueError`` on empty input, + instead of returning ``None``. + * - ``to_airtable_value()`` + - Removed. Use :func:`~pyairtable.formulas.to_formula_str` instead. + * - ``EQUAL()`` + - Removed. Use :class:`~pyairtable.formulas.EQ` instead. + * - ``NOT_EQUAL()`` + - Removed. Use :class:`~pyairtable.formulas.NE` instead. + * - ``LESS()`` + - Removed. Use :class:`~pyairtable.formulas.LT` instead. + * - ``LESS_EQUAL()`` + - Removed. Use :class:`~pyairtable.formulas.LTE` instead. + * - ``GREATER()`` + - Removed. Use :class:`~pyairtable.formulas.GT` instead. + * - ``GREATER_EQUAL()`` + - Removed. Use :class:`~pyairtable.formulas.GTE` instead. + * - ``FIELD()`` + - Removed. Use :class:`~pyairtable.formulas.Field` or :func:`~pyairtable.formulas.field_name`. 
+ * - ``STR_VALUE()`` + - Removed. Use :func:`~pyairtable.formulas.quoted` instead. + * - :func:`~pyairtable.formulas.AND`, :func:`~pyairtable.formulas.OR` + - These no longer return ``str``, and instead return instances of + :class:`~pyairtable.formulas.Comparison`. + * - :func:`~pyairtable.formulas.IF`, :func:`~pyairtable.formulas.FIND`, :func:`~pyairtable.formulas.LOWER` + - These no longer return ``str``, and instead return instances of + :class:`~pyairtable.formulas.FunctionCall`. + * - :func:`~pyairtable.formulas.escape_quotes` + - Deprecated. Use :func:`~pyairtable.formulas.quoted` instead. + +Changes to the ORM in 3.0 +--------------------------------------------- + +* :data:`Model.created_time ` is now a ``datetime`` (or ``None``) + instead of ``str``. This change also applies to all timestamp fields used in :ref:`API: pyairtable.models`. + +* :meth:`Model.save ` now only saves changed fields to the API, which + means it will sometimes not perform any network traffic (though this behavior can be overridden). + It also now returns an instance of :class:`~pyairtable.orm.SaveResult` instead of ``bool``. + +* Fields which contain lists of values now return instances of ``ChangeTrackingList``, which + is still a subclass of ``list``. This should not affect most uses, but it does mean that + some code which relies on exact type checking may need to be updated: + + >>> isinstance(Foo().atts, list) + True + >>> type(Foo().atts) is list + False + >>> type(Foo().atts) + <class 'pyairtable.orm.lists.ChangeTrackingList'> + +* The 3.0 release has changed the API for retrieving ORM model configuration: + + .. 
list-table:: + :header-rows: 1 + + * - Method in 2.x + - Method in 3.0 + * - ``Model.get_api()`` + - ``Model.meta.api`` + * - ``Model.get_base()`` + - ``Model.meta.base`` + * - ``Model.get_table()`` + - ``Model.meta.table`` + * - ``Model._get_meta(name)`` + - ``Model.meta.get(name)`` + +Breaking type changes +--------------------------------------------- + +* ``pyairtable.api.types.CreateAttachmentDict`` is now a ``Union`` instead of a ``TypedDict``, + which may change some type checking behavior in code that uses it. + +Breaking name changes +--------------------------------------------- + + * - | ``pyairtable.api.enterprise.ClaimUsersResponse`` + | has become :class:`pyairtable.api.enterprise.ManageUsersResponse` + * - | ``pyairtable.formulas.CircularDependency`` + | has become :class:`pyairtable.exceptions.CircularFormulaError` + * - | ``pyairtable.params.InvalidParamException`` + | has become :class:`pyairtable.exceptions.InvalidParameterError` + * - | ``pyairtable.orm.fields.MissingValue`` + | has become :class:`pyairtable.exceptions.MissingValueError` + * - | ``pyairtable.orm.fields.MultipleValues`` + | has become :class:`pyairtable.exceptions.MultipleValuesError` + * - | ``pyairtable.models.AuditLogEvent.model_id`` + | has become :data:`pyairtable.models.AuditLogEvent.object_id` + * - | ``pyairtable.models.AuditLogEvent.model_type`` + | has become :data:`pyairtable.models.AuditLogEvent.object_type` + + +Migrating from 2.2 to 2.3 +============================ + +A breaking API change was accidentally introduced into the 2.3 minor release +by renaming some nested fields of :class:`~pyairtable.models.schema.BaseCollaborators` +and :class:`~pyairtable.models.schema.WorkspaceCollaborators`. 
+ + * - | ``base.collaborators().invite_links.base_invite_links`` + | has become ``base.collaborators().invite_links.via_base`` + * - | ``base.collaborators().invite_links.workspace_invite_links`` + | has become ``base.collaborators().invite_links.via_workspace`` + * - | ``ws.collaborators().invite_links.base_invite_links`` + | has become ``ws.collaborators().invite_links.via_base`` + * - | ``ws.collaborators().invite_links.workspace_invite_links`` + | has become ``ws.collaborators().invite_links.via_workspace`` + * - | ``ws.collaborators().individual_collaborators.base_collaborators`` + | has become ``ws.collaborators().individual_collaborators.via_base`` + * - | ``ws.collaborators().individual_collaborators.workspace_collaborators`` + | has become ``ws.collaborators().individual_collaborators.via_workspace`` + * - | ``ws.collaborators().group_collaborators.base_collaborators`` + | has become ``ws.collaborators().group_collaborators.via_base`` + * - | ``ws.collaborators().group_collaborators.workspace_collaborators`` + | has become ``ws.collaborators().group_collaborators.via_workspace`` + + Migrating from 1.x to 2.0 ============================ @@ -53,7 +237,7 @@ See below for supported and unsupported patterns: # The following will raise a TypeError. We do this proactively # to avoid situations where self.api and self.base don't align. - >>> table = Table(api, base_id, table_name) # [Api, Base, str] + >>> table = Table(api, base, table_name) # [Api, Base, str] You may need to change how your code looks up some pieces of connection metadata; for example: @@ -76,7 +260,7 @@ You may need to change how your code looks up some pieces of connection metadata - :meth:`table.record_url() ` There is no fully exhaustive list of changes; please refer to -:ref:`the API documentation ` for a list of available methods and attributes. +:ref:`the API documentation ` for a list of available methods and attributes. 
Retry by default ---------------- @@ -97,7 +281,7 @@ Changes to types ---------------- * All functions and methods in this library have full type annotations that will pass ``mypy --strict``. - See the :ref:`types ` module for more information on the types this library accepts and returns. + See the :mod:`pyairtable.api.types` module for more information on the types this library accepts and returns. batch_upsert has a different return type -------------------------------------------- diff --git a/docs/source/orm.rst b/docs/source/orm.rst index 9b2719df..59a6e2a5 100644 --- a/docs/source/orm.rst +++ b/docs/source/orm.rst @@ -20,7 +20,7 @@ The :class:`~pyairtable.orm.Model` class allows you create ORM-style classes for last_name = F.TextField("Last Name") email = F.EmailField("Email") is_registered = F.CheckboxField("Registered") - company = F.LinkField("Company", Company, lazy=False) + company = F.SingleLinkField("Company", Company, lazy=False) class Meta: base_id = "appaPqizdsNHDvlEm" @@ -28,9 +28,13 @@ The :class:`~pyairtable.orm.Model` class allows you create ORM-style classes for api_key = "keyapikey" -Once you have a model, you can create new objects to represent your -Airtable records. Call :meth:`~pyairtable.orm.Model.save` to save the -newly created object to the Airtable API. +Once you have a model, you can query for existing records using the +``first()`` and ``all()`` methods, which take the same arguments as +:meth:`Table.first ` and :meth:`Table.all `. + +You can also create new objects to represent Airtable records you wish +to create and save. Call :meth:`~pyairtable.orm.Model.save` to save the +newly created object back to Airtable. >>> contact = Contact( ... first_name="Mike", @@ -47,7 +51,6 @@ newly created object to the Airtable API. >>> contact.id 'recS6qSLw0OCA6Xul' - You can read and modify attributes, then call :meth:`~pyairtable.orm.Model.save` when you're ready to save your changes to the API. 
@@ -63,7 +66,7 @@ To refresh a record from the API, use :meth:`~pyairtable.orm.Model.fetch`: >>> contact.is_registered True -Finally, you can use :meth:`~pyairtable.orm.Model.delete` to delete the record: +Use :meth:`~pyairtable.orm.Model.delete` to delete the record: >>> contact.delete() True @@ -77,6 +80,21 @@ create, modify, or delete several records at once: >>> Contact.batch_save(contacts) >>> Contact.batch_delete(contacts) +You can use your model's fields in :doc:`formula expressions `. +ORM models' fields also provide shortcut methods +:meth:`~pyairtable.orm.fields.Field.eq`, +:meth:`~pyairtable.orm.fields.Field.ne`, +:meth:`~pyairtable.orm.fields.Field.gt`, +:meth:`~pyairtable.orm.fields.Field.gte`, +:meth:`~pyairtable.orm.fields.Field.lt`, and +:meth:`~pyairtable.orm.fields.Field.lte`: + + >>> formula = Contact.last_name.eq("Smith") & Contact.is_registered + >>> str(formula) + "AND({Last Name}='Smith', {Registered})" + >>> results = Contact.all(formula=formula) + [...] + Supported Field Types ----------------------------- @@ -91,17 +109,31 @@ read `Field types and cell values `", cls.__doc__ or "") - ro = ' 🔒' if cls.readonly else '' - cog.outl(f" * - :class:`~pyairtable.orm.fields.{cls.__name__}`{ro}") - cog.outl(f" - {', '.join(f'{link}__' for link in links) if links else '(see docs)'}") + def cog_class_table(classes): + cog.outl(".. list-table::") + cog.outl(" :header-rows: 1\n") + cog.outl(" * - ORM field class") + cog.outl(" - Airtable field type(s)") + for cls in classes: + links = re.findall(r"`.+? 
<.*?field-model.*?>`", cls.__doc__ or "") + ro = ' 🔒' if cls.readonly else '' + cog.outl(f" * - :class:`~pyairtable.orm.fields.{cls.__name__}`{ro}") + cog.outl(f" - {', '.join(f'{link}__' for link in links) if links else '(undocumented)'}") + + classes = sorted(fields.ALL_FIELDS, key=attrgetter("__name__")) + optional = [cls for cls in classes if not cls.__name__.startswith("Required")] + required = [cls for cls in classes if cls.__name__.startswith("Required")] + + cog.outl("..") # terminate the comment block + cog_class_table(optional) + cog.outl("") + cog.outl("Airtable does not have a concept of fields that require values,") + cog.outl("but pyAirtable allows you to enforce that concept within code") + cog.outl("using one of the following field classes.") + cog.outl("") + cog.outl("See :ref:`Required Values` for more details.") + cog.outl("") + cog_class_table(required) ]]] .. .. list-table:: @@ -109,6 +141,8 @@ read `Field types and cell values `__ * - :class:`~pyairtable.orm.fields.AttachmentsField` - `Attachments `__ * - :class:`~pyairtable.orm.fields.AutoNumberField` 🔒 @@ -136,7 +170,7 @@ read `Field types and cell values `__ * - :class:`~pyairtable.orm.fields.EmailField` - - `Email `__ + - `Email `__ * - :class:`~pyairtable.orm.fields.ExternalSyncSourceField` 🔒 - `Sync source `__ * - :class:`~pyairtable.orm.fields.FloatField` @@ -151,8 +185,12 @@ read `Field types and cell values `__ * - :class:`~pyairtable.orm.fields.LookupField` 🔒 - `Lookup `__ + * - :class:`~pyairtable.orm.fields.ManualSortField` 🔒 + - (undocumented) + * - :class:`~pyairtable.orm.fields.MultilineTextField` + - `Long text `__ * - :class:`~pyairtable.orm.fields.MultipleCollaboratorsField` - - `Multiple Collaborators `__ + - `Multiple collaborators `__ * - :class:`~pyairtable.orm.fields.MultipleSelectField` - `Multiple select `__ * - :class:`~pyairtable.orm.fields.NumberField` @@ -164,17 +202,103 @@ read `Field types and cell values `__ * - :class:`~pyairtable.orm.fields.RichTextField` - - 
`Rich text `__ + - `Rich text `__ * - :class:`~pyairtable.orm.fields.SelectField` - `Single select `__ + * - :class:`~pyairtable.orm.fields.SingleLineTextField` + - `Single line text `__ + * - :class:`~pyairtable.orm.fields.SingleLinkField` + - `Link to another record `__ * - :class:`~pyairtable.orm.fields.TextField` - `Single line text `__, `Long text `__ * - :class:`~pyairtable.orm.fields.UrlField` - `Url `__ -.. [[[end]]] +Airtable does not have a concept of fields that require values, +but pyAirtable allows you to enforce that concept within code +using one of the following field classes. + +See :ref:`Required Values` for more details. + +.. list-table:: + :header-rows: 1 + + * - ORM field class + - Airtable field type(s) + * - :class:`~pyairtable.orm.fields.RequiredAITextField` 🔒 + - `AI Text `__ + * - :class:`~pyairtable.orm.fields.RequiredBarcodeField` + - `Barcode `__ + * - :class:`~pyairtable.orm.fields.RequiredCollaboratorField` + - `Collaborator `__ + * - :class:`~pyairtable.orm.fields.RequiredCountField` 🔒 + - `Count `__ + * - :class:`~pyairtable.orm.fields.RequiredCurrencyField` + - `Currency `__ + * - :class:`~pyairtable.orm.fields.RequiredDateField` + - `Date `__ + * - :class:`~pyairtable.orm.fields.RequiredDatetimeField` + - `Date and time `__ + * - :class:`~pyairtable.orm.fields.RequiredDurationField` + - `Duration `__ + * - :class:`~pyairtable.orm.fields.RequiredEmailField` + - `Email `__ + * - :class:`~pyairtable.orm.fields.RequiredFloatField` + - `Number `__ + * - :class:`~pyairtable.orm.fields.RequiredIntegerField` + - `Number `__ + * - :class:`~pyairtable.orm.fields.RequiredMultilineTextField` + - `Long text `__ + * - :class:`~pyairtable.orm.fields.RequiredNumberField` + - `Number `__ + * - :class:`~pyairtable.orm.fields.RequiredPercentField` + - `Percent `__ + * - :class:`~pyairtable.orm.fields.RequiredPhoneNumberField` + - `Phone `__ + * - :class:`~pyairtable.orm.fields.RequiredRatingField` + - `Rating `__ + * - 
:class:`~pyairtable.orm.fields.RequiredRichTextField` + - `Rich text `__ + * - :class:`~pyairtable.orm.fields.RequiredSelectField` + - `Single select `__ + * - :class:`~pyairtable.orm.fields.RequiredSingleLineTextField` + - `Single line text `__ + * - :class:`~pyairtable.orm.fields.RequiredTextField` + - `Single line text `__, `Long text `__ + * - :class:`~pyairtable.orm.fields.RequiredUrlField` + - `Url `__ +.. [[[end]]] (sum: PtIJDLJBQM) + + +Type Annotations +------------------ + +pyAirtable uses type annotations to provide hints to type checkers like mypy. +Type annotations improve code readability and help catch errors during development. + +Basic field types like :class:`~pyairtable.orm.fields.TextField` and :class:`~pyairtable.orm.fields.IntegerField` +will have their types inferred from the field's configuration. For example: -Formulas, Rollups, and Lookups +.. code-block:: python + + from pyairtable.orm import Model, fields as F + + class Person(Model): + class Meta: ... + + name = F.TextField("Name") + account_id = F.IntegerField("Account ID") + edited_by = F.LastModifiedByField("Last Modified By") + + record = Person() + reveal_type(record.name) # Revealed type is 'builtins.str*' + reveal_type(record.account_id) # Revealed type is 'builtins.int*' + reveal_type(record.edited_by) # Revealed type is 'pyairtable.api.types.CollaboratorDict' + +You may need to provide type hints to complex fields that involve lists. See below for examples. + + +Formula, Rollup, and Lookup Fields ---------------------------------- The data type of "formula", "rollup", and "lookup" fields will depend on the underlying fields @@ -225,48 +349,104 @@ You can check for errors using the :func:`~pyairtable.api.types.is_airtable_erro True +Required Values +--------------- + +Airtable does not generally have a concept of fields that require values, but +pyAirtable allows you to enforce that a field must have a value before saving it. 
+To do this, use one of the "Required" field types, which will raise an exception +if either of the following occur: + + 1. If you try to set its value to ``None`` (or, sometimes, to the empty string). + 2. If the API returns a ``None`` (or empty string) as the field's value. + +For example, given this code: + +.. code-block:: python + + from pyairtable.orm import Model, fields as F + + class MyTable(Model): + class Meta: + ... + + name = F.RequiredTextField("Name") + +The following will all raise an exception: + +.. code-block:: python + + >>> MyTable(name=None) + Traceback (most recent call last): + ... + MissingValueError: MyTable.name does not accept empty values + + >>> r = MyTable.from_record(fake_record(Name="Alice")) + >>> r.name + 'Alice' + >>> r.name = None + Traceback (most recent call last): + ... + MissingValueError: MyTable.name does not accept empty values + + >>> r = MyTable.from_record(fake_record(Name=None)) + >>> r.name + Traceback (most recent call last): + ... + MissingValueError: MyTable.name received an empty value + +One reason to use these fields (sparingly!) might be to avoid adding defensive +null-handling checks all over your code, if you are confident that the workflows +around your Airtable base will not produce an empty value (or that an empty value +is enough of a problem that your code should raise an exception). + + +Linked Records +---------------- + +In addition to standard data type fields, the :class:`~pyairtable.orm.fields.LinkField` -class offers a special behaviour that can fetch linked records, so that you can -traverse between related records. +and :class:`~pyairtable.orm.fields.SingleLinkField` classes will fetch linked records +upon access, so that you can traverse between related records. .. code-block:: python from pyairtable.orm import Model, fields as F - class Company(Model): + class Person(Model): class Meta: ... 
name = F.TextField("Name") + company = F.SingleLinkField("Company", "Company") - class Person(Model): + class Company(Model): class Meta: ... name = F.TextField("Name") - company = F.LinkField("Company", Company) + people = F.LinkField("People", Person) + .. code-block:: python >>> person = Person.from_id("recZ6qSLw0OCA61ul") >>> person.company - [] - >>> person.company[0].name + + >>> person.company.name 'Acme Corp' + >>> person.company.people + [, ...] pyAirtable will not retrieve field values for a model's linked records until the -first time you access that field. So in the example above, the fields for Company -were loaded when ``person.company`` was called for the first time. After that, -the Company models are persisted, and won't be refreshed until you call +first time you access a field. So in the example above, the fields for Company +were loaded when ``person.company`` was called for the first time. Linked models +are persisted after being created, and won't be refreshed until you call :meth:`~pyairtable.orm.Model.fetch`. .. note:: :class:`~pyairtable.orm.fields.LinkField` will always return a list of values, even if there is only a single value shown in the Airtable UI. It will not respect the `prefersSingleRecordLink `_ - field configuration option, because the API will *always* return linked fields - as a list of record IDs. + field configuration option. If you expect a field to only ever return a single + linked record, use :class:`~pyairtable.orm.fields.SingleLinkField`. Cyclical links @@ -297,18 +477,18 @@ address this: class Meta: ... name = F.TextField("Name") - company = F.LinkField[Company]("Company", Company) - manager = F.LinkField["Person"]("Manager", "Person") # option 2 + company = F.SingleLinkField[Company]("Company", Company) + manager = F.SingleLinkField["Person"]("Manager", "Person") # option 2 reports = F.LinkField["Person"]("Reports", F.LinkSelf) # option 3 .. 
code-block:: python >>> person = Person.from_id("recZ6qSLw0OCA61ul") >>> person.manager - [] - >>> person.manager[0].reports + + >>> person.manager.reports [, ...] - >>> person.company[0].employees + >>> person.company.employees [, , ...] Breaking down the :class:`~pyairtable.orm.fields.LinkField` invocation above, @@ -326,6 +506,97 @@ there are four components: 4. The model class, the path to the model class, or :data:`~pyairtable.orm.fields.LinkSelf` +Memoizing linked records +""""""""""""""""""""""""""""" + +There are cases where your application may need to retrieve hundreds of nested +models through the ORM, and you don't want to make hundreds of Airtable API calls. +pyAirtable provides a way to pre-fetch and memoize instances for each record, +which will then be reused later by record link fields. + +The usual way to do this is passing ``memoize=True`` to a retrieval method +at the beginning of your code to pre-fetch any records you might need. +For example, you might have the following: + +.. code-block:: python + + from pyairtable.orm import Model, fields as F + from operator import attrgetter + + class Book(Model): + class Meta: ... + title = F.TextField("Title") + published = F.DateField("Publication Date") + + class Author(Model): + class Meta: ... + name = F.TextField("Name") + books = F.LinkField("Books", Book) + + def main(): + books = Book.all(memoize=True) + authors = Author.all(memoize=True) + for author in authors: + print(f"* {author.name}") + for book in sorted(author.books, key=attrgetter("published")): + print(f" - {book.title} ({book.published.isoformat()})") + +This code will perform a series of API calls at the beginning to fetch +all records from the Books and Authors tables, so that ``author.books`` +does not need to request linked records one at a time during the loop. + +If you always want to memoize models retrieved from the API, you can set +``memoize = True`` in the ``Meta`` configuration for your model: + +.. 
code-block:: python + + class Book(Model): + Meta = {..., "memoize": True} + title = F.TextField("Title") + + class Author(Model): + Meta = {...} + name = F.TextField("Name") + books = F.LinkField("Books", Book) + + Book.first() # this will memoize the book it creates + Author.first().books # this will memoize all books created + Book.all(memoize=False) # this will skip memoization + + +The following methods support the ``memoize=`` keyword argument to control +whether the ORM saves the models it creates for later reuse. If a model is +configured to memoize by default, pass ``memoize=False`` to override it. + +.. list-table:: + :header-rows: 1 + + * - Retrieval function + - Will it reuse saved models? + - Will it call the API? + * - :meth:`Model.all ` + - Never + - Always + * - :meth:`Model.first ` + - Never + - Always + * - :meth:`Model.from_record ` + - Never + - Never + * - :meth:`Model.from_id ` + - Yes + - Yes, unless ``fetch=True`` + * - :meth:`Model.from_ids ` + - Yes + - Yes, unless ``fetch=True`` + * - :meth:`LinkField.populate ` + - Yes + - Yes, unless ``lazy=True`` + * - :meth:`SingleLinkField.populate ` + - Yes + - Yes, unless ``lazy=True`` + + Comments ---------- @@ -340,7 +611,7 @@ comments on a particular record, just like their :class:`~pyairtable.Table` equi Comment( id='comdVMNxslc6jG0Xe', text='Hello, @[usrVMNxslc6jG0Xed]!', - created_time='2023-06-07T17:46:24.435891', + created_time=datetime.datetime(...), last_updated_time=None, mentioned={ 'usrVMNxslc6jG0Xed': Mentioned( @@ -364,6 +635,61 @@ comments on a particular record, just like their :class:`~pyairtable.Table` equi >>> comment.delete() +Attachments in the ORM +---------------------- + +When retrieving attachments from the API, pyAirtable will return a list of +:class:`~pyairtable.api.types.AttachmentDict`. 
+ + >>> model = YourModel.from_id("recMNxslc6jG0XedV") + >>> model.attachments + [ + { + 'id': 'attMNxslc6jG0XedV', + 'url': 'https://dl.airtable.com/...', + 'filename': 'example.jpg', + 'size': 12345, + 'type': 'image/jpeg' + }, + ... + ] + +You can append your own values to this list, and as long as they have +either a ``"id"`` or ``"url"`` key, they will be saved back to the API. + + >>> model.attachments.append({"url": "https://example.com/example.jpg"}) + >>> model.save() + +You can also use :meth:`~pyairtable.orm.lists.AttachmentsList.upload` to +directly upload content to Airtable: + +.. automethod:: pyairtable.orm.lists.AttachmentsList.upload + + +ORM Metadata +------------------ + +Access to the configuration of a model and the schema of its underlying base/table +are available through the :attr:`~pyairtable.orm.Model.meta` attribute: + +.. code-block:: python + + >>> model = YourModel() + >>> model.meta.base_id + 'appaPqizdsNHDvlEm' + >>> model.meta.table_name + 'YourModel' + >>> model.meta.table.schema() + TableSchema(id='appaPqizdsNHDvlEm', name='YourModel', ...) + +For convenience, the schema of ORM-defined fields can be accessed via those field definitions: + +.. code-block:: python + + >>> YourModel.name.field_schema() + FieldSchema(id='fldMNxslc6jG0XedV', name='Name', type='singleLineText', ...) + + ORM Limitations ------------------ @@ -393,20 +719,23 @@ For example: .. code-block:: python + from pyairtable.orm import fields as F + class Person(Model): class Meta: ... 
name = F.TextField("Name") - manager = F.LinkField["Person"]("Manager", "Person") + manager = F.SingleLinkField["Person"]("Manager", F.LinkSelf) # This field is a formula: {Manager} != BLANK() has_manager = F.IntegerField("Has Manager?", readonly=True) bob = Person.from_id("rec2AqNuHwWcnG871") - assert bob.manager == [] + assert bob.manager is None assert bob.has_manager == 0 - bob.manager = [alice] + alice = Person.from_id("recAB2AqNuHwWcnG8") + bob.manager = alice bob.save() assert bob.has_manager == 0 diff --git a/docs/source/tables.rst b/docs/source/tables.rst index 8d5f4022..670120a0 100644 --- a/docs/source/tables.rst +++ b/docs/source/tables.rst @@ -111,9 +111,8 @@ like :meth:`~pyairtable.Table.iterate` or :meth:`~pyairtable.Table.all`. - |kwarg_user_locale| * - ``time_zone`` - |kwarg_time_zone| - * - ``return_fields_by_field_id`` - .. versionadded:: 1.3.0 - - |kwarg_return_fields_by_field_id| + * - ``use_field_ids`` + - |kwarg_use_field_ids| Return Values @@ -152,32 +151,21 @@ This library will return records as :class:`~pyairtable.api.types.RecordDict`. Formulas ******** -The :mod:`pyairtable.formulas` module provides functionality to help you compose -`Airtable formulas `_. +Methods like :meth:`~pyairtable.Table.all` or :meth:`~pyairtable.Table.first` +accept a ``formula=`` keyword argument so you can filter results using an +`Airtable formula `_. -* :func:`~pyairtable.formulas.match` checks field values from a Python ``dict``: +The simplest option is to pass your formula as a string; however, if your use case +is complex and you want to avoid lots of f-strings and escaping, use +:func:`~pyairtable.formulas.match` to check field values from a ``dict``: .. 
code-block:: python >>> from pyairtable.formulas import match - >>> formula = match({"First Name": "John", "Age": 21}) - >>> formula - "AND({First Name}='John',{Age}=21)" - >>> table.first(formula=formula) + >>> table.first(formula=match({"First Name": "John", "Age": 21})) {"id": "recUwKa6lbNSMsetH", "fields": {"First Name": "John", "Age": 21}} -* :func:`~pyairtable.formulas.to_airtable_value` converts a Python value - to an expression that can be included in a formula: - - .. code-block:: python - - >>> from pyairtable.formulas import to_airtable_value - >>> to_airtable_value(1) - 1 - >>> to_airtable_value(datetime.date.today()) - '2023-06-13' - -For more on generating formulas, look over the :mod:`pyairtable.formulas` API reference. +For more on generating formulas, read the :doc:`formulas` documentation. Retries @@ -289,7 +277,7 @@ and :meth:`~pyairtable.Table.add_comment` methods will return instances of Comment( id='comdVMNxslc6jG0Xe', text='Hello, @[usrVMNxslc6jG0Xed]!', - created_time='2023-06-07T17:46:24.435891', + created_time=datetime.datetime(...), last_updated_time=None, mentioned={ 'usrVMNxslc6jG0Xed': Mentioned( @@ -311,3 +299,14 @@ and :meth:`~pyairtable.Table.add_comment` methods will return instances of >>> table.comments("recMNxslc6jG0XedV")[0].text 'Never mind!' >>> comment.delete() + +Testing Your Code +----------------- + +pyAirtable provides a :class:`~pyairtable.testing.MockAirtable` class that can be used to +test your code without making real requests to Airtable. + +.. autoclass:: pyairtable.testing.MockAirtable + :noindex: + +For more information, see :mod:`pyairtable.testing`. diff --git a/docs/source/webhooks.rst b/docs/source/webhooks.rst index 5a74d536..5157a215 100644 --- a/docs/source/webhooks.rst +++ b/docs/source/webhooks.rst @@ -1,3 +1,6 @@ +.. include:: _substitutions.rst +.. include:: _warn_latest.rst + Webhooks ============================== @@ -5,8 +8,52 @@ Airtable's `Webhooks API ` to create a webhook. +2. 
Airtable will ``POST`` notifications to the webhook URL you provided. +3. Use :meth:`WebhookNotification.from_request ` to validate each notification. +4. Use :meth:`Webhook.payloads ` to retrieve new payloads after the notification. + +This means it is technically possible to ignore webhook notifications altogether and to simply +poll a webhook periodically for new payloads. However, this increases the likelihood of running into +`Airtable's API rate limits `__. + +When using webhooks, you need some way to persist the ``cursor`` of the webhook +payload, so that you do not retrieve the same payloads again on subsequent calls, +even if your job is interrupted in the middle of processing a list of payloads. + +For example: + + .. code-block:: python + + from flask import Flask, request + from pyairtable import Api + from pyairtable.models import WebhookNotification + + app = Flask(__name__) + + @app.route("/airtable-webhook", methods=["POST"]) + def airtable_webhook(): + body = request.data + header = request.headers["X-Airtable-Content-MAC"] + secret = app.config["AIRTABLE_WEBHOOK_SECRET"] + event = WebhookNotification.from_request(body, header, secret) + airtable = Api(app.config["AIRTABLE_API_KEY"]) + webhook = airtable.base(event.base.id).webhook(event.webhook.id) + cursor = int(your_database.get(event.webhook, 0)) + 1 + + for payload in webhook.payloads(cursor=cursor): + process_payload(payload) # probably enqueue a background job + your_database.set(event.webhook, payload.cursor + 1) + + return ("", 204) # intentionally empty response + +Methods +------- + +The following methods will be most commonly used for working with payloads. +You can read the full documentation at :mod:`pyairtable.models.webhook`. .. automethod:: pyairtable.Base.add_webhook :noindex: @@ -17,9 +64,8 @@ using a straightforward API within the :class:`~pyairtable.Base` class. .. automethod:: pyairtable.Base.webhook :noindex: -.. automethod:: pyairtable.models.Webhook.payloads +.. 
automethod:: pyairtable.models.WebhookNotification.from_request :noindex: -.. autoclass:: pyairtable.models.WebhookNotification +.. automethod:: pyairtable.models.Webhook.payloads :noindex: - :members: from_request diff --git a/pyairtable/__init__.py b/pyairtable/__init__.py index cd64ed62..8c4e633a 100644 --- a/pyairtable/__init__.py +++ b/pyairtable/__init__.py @@ -1,11 +1,15 @@ -__version__ = "2.1.0.post1" +__version__ = "3.3.0" -from .api import Api, Base, Table -from .api.retrying import retry_strategy +from pyairtable.api import Api, Base, Table +from pyairtable.api.enterprise import Enterprise +from pyairtable.api.retrying import retry_strategy +from pyairtable.api.workspace import Workspace __all__ = [ "Api", "Base", + "Enterprise", "Table", + "Workspace", "retry_strategy", ] diff --git a/pyairtable/_compat.py b/pyairtable/_compat.py deleted file mode 100644 index dbc06d85..00000000 --- a/pyairtable/_compat.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import TYPE_CHECKING - -if TYPE_CHECKING: # mypy really does not like this conditional import. - import pydantic as pydantic -else: - # Pydantic v2 broke a bunch of stuff. Luckily they provide a built-in v1. 
- try: - import pydantic.v1 as pydantic - except ImportError: - import pydantic - -__all__ = ["pydantic"] diff --git a/pyairtable/api/__init__.py b/pyairtable/api/__init__.py index 724aacb8..151feda3 100644 --- a/pyairtable/api/__init__.py +++ b/pyairtable/api/__init__.py @@ -1,6 +1,6 @@ -from .api import Api -from .base import Base -from .table import Table +from pyairtable.api.api import Api +from pyairtable.api.base import Base +from pyairtable.api.table import Table __all__ = [ "Api", diff --git a/pyairtable/api/api.py b/pyairtable/api/api.py index 7f4f5dc0..3dae473c 100644 --- a/pyairtable/api/api.py +++ b/pyairtable/api/api.py @@ -1,17 +1,25 @@ -import posixpath -from functools import lru_cache -from typing import Any, Dict, Iterator, Optional, Sequence, Tuple, TypeVar, Union +from functools import cached_property +from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, TypeVar, Union import requests from requests.sessions import Session from typing_extensions import TypeAlias -import pyairtable.api.base -import pyairtable.api.table from pyairtable.api import retrying +from pyairtable.api.base import Base +from pyairtable.api.enterprise import Enterprise from pyairtable.api.params import options_to_json_and_params, options_to_params +from pyairtable.api.table import Table from pyairtable.api.types import UserAndScopesDict, assert_typed_dict -from pyairtable.utils import chunked +from pyairtable.api.workspace import Workspace +from pyairtable.models.schema import Bases +from pyairtable.utils import ( + Url, + UrlBuilder, + cache_unless_forced, + chunked, + enterprise_only, +) T = TypeVar("T") TimeoutTuple: TypeAlias = Tuple[int, int] @@ -37,6 +45,19 @@ class Api: #: Airtable-imposed limit on the length of a URL (including query parameters). 
MAX_URL_LENGTH = 16000 + # Cached metadata to reduce API calls + _bases: Optional[Dict[str, "Base"]] = None + + endpoint_url: Url + session: Session + use_field_ids: bool + + class _urls(UrlBuilder): + whoami = Url("meta/whoami") + bases = Url("meta/bases") + + urls = cached_property(_urls) + def __init__( self, api_key: str, @@ -44,6 +65,7 @@ def __init__( timeout: Optional[TimeoutTuple] = None, retry_strategy: Optional[Union[bool, retrying.Retry]] = True, endpoint_url: str = "https://api.airtable.com", + use_field_ids: bool = False, ): """ Args: @@ -59,6 +81,8 @@ def __init__( (see :func:`~pyairtable.retry_strategy` for details). endpoint_url: The API endpoint to use. Override this if you are using a debugging or caching proxy. + use_field_ids: If ``True``, all API requests will return responses + with field IDs instead of field names. """ if retry_strategy is True: retry_strategy = retrying.retry_strategy() @@ -67,9 +91,10 @@ def __init__( else: self.session = retrying._RetryingSession(retry_strategy) - self.endpoint_url = endpoint_url + self.endpoint_url = Url(endpoint_url) self.timeout = timeout self.api_key = api_key + self.use_field_ids = use_field_ids @property def api_key(self) -> str: @@ -86,25 +111,126 @@ def api_key(self, value: str) -> None: def __repr__(self) -> str: return "" - @lru_cache - def base(self, base_id: str) -> "pyairtable.api.base.Base": + def whoami(self) -> UserAndScopesDict: + """ + Return the current user ID and (if connected via OAuth) the list of scopes. + See `Get user ID & scopes `_ for more information. + """ + data = self.request("GET", self.urls.whoami) + return assert_typed_dict(UserAndScopesDict, data) + + def workspace(self, workspace_id: str) -> Workspace: + return Workspace(self, workspace_id) + + def base( + self, + base_id: str, + *, + validate: bool = False, + force: bool = False, + ) -> "Base": + """ + Return a new :class:`Base` instance that uses this instance of :class:`Api`. 
+ + Args: + base_id: |arg_base_id| + validate: |kwarg_validate_metadata| + force: |kwarg_force_metadata| + + Raises: + KeyError: if ``validate=True`` and the given base ID does not exist. + """ + if validate: + info = self._base_info(force=force).base(base_id) + return self._base_from_info(info) + return Base(self, base_id) + + @cache_unless_forced + def _base_info(self) -> Bases: """ - Returns a new :class:`Base` instance that uses this instance of :class:`Api`. + Return a schema object that represents all bases available via the API. + """ + url = self.urls.bases + data = { + "bases": [ + base_info + for page in self.iterate_requests("GET", url) + for base_info in page["bases"] + ] + } + return Bases.from_api(data, self) + + def _base_from_info(self, base_info: Bases.Info) -> "Base": + return Base( + self, + base_info.id, + name=base_info.name, + permission_level=base_info.permission_level, + ) + + def bases(self, *, force: bool = False) -> List["Base"]: """ - return pyairtable.api.base.Base(self, base_id) + Retrieve the base's schema and return a list of :class:`Base` instances. - def table(self, base_id: str, table_name: str) -> "pyairtable.api.table.Table": + Args: + force: |kwarg_force_metadata| + + Usage: + >>> api.bases() + [ + , + + ] """ - Returns a new :class:`Table` instance that uses this instance of :class:`Api`. + return [ + self._base_from_info(info) for info in self._base_info(force=force).bases + ] + + def create_base( + self, + workspace_id: str, + name: str, + tables: Sequence[Dict[str, Any]], + ) -> "Base": """ - return self.base(base_id).table(table_name) + Create a base in the given workspace. + + See https://airtable.com/developers/web/api/create-base - def build_url(self, *components: str) -> str: + Args: + workspace_id: The ID of the workspace where the new base will live. + name: The name to give to the new base. Does not need to be unique. + tables: A list of ``dict`` objects that conform to Airtable's + `Table model `__. 
""" - Returns a URL to the Airtable API endpoint with the given URL components, + return self.workspace(workspace_id).create_base(name, tables) + + def table( + self, + base_id: str, + table_name: str, + *, + validate: bool = False, + force: bool = False, + ) -> "Table": + """ + Build a new :class:`Table` instance that uses this instance of :class:`Api`. + + Args: + base_id: |arg_base_id| + table_name: The Airtable table's ID or name. + validate: |kwarg_validate_metadata| + force: |kwarg_force_metadata| + """ + base = self.base(base_id, validate=validate, force=force) + return base.table(table_name, validate=validate, force=force) + + def build_url(self, *components: str) -> Url: + """ + Build a URL to the Airtable API endpoint with the given URL components, including the API version number. """ - return posixpath.join(self.endpoint_url, self.VERSION, *components) + return self.endpoint_url / self.VERSION // components def request( self, @@ -116,10 +242,8 @@ def request( json: Optional[Dict[str, Any]] = None, ) -> Any: """ - Makes a request to the Airtable API, optionally converting a GET to a POST - if the URL exceeds the API's maximum URL length. - - See https://support.airtable.com/docs/enforcement-of-url-length-limit-for-web-api-requests + Make a request to the Airtable API, optionally converting a GET to a POST if the URL exceeds the + `maximum URL length `__. Args: method: HTTP method to use. @@ -162,15 +286,46 @@ def request( json=json, ) - response = self.session.send(prepared, timeout=self.timeout) + response = self.session.request( + method=method, + url=url, + params=request_params, + json=json, + ) return self._process_response(response) + def get(self, url: str, **kwargs: Any) -> Any: + """ + Make a GET request to the Airtable API. + See :meth:`~Api.request` for keyword arguments. + """ + return self.request("GET", url, **kwargs) + + def post(self, url: str, **kwargs: Any) -> Any: + """ + Make a POST request to the Airtable API. 
+ See :meth:`~Api.request` for keyword arguments. + """ + return self.request("POST", url, **kwargs) + + def patch(self, url: str, **kwargs: Any) -> Any: + """ + Make a PATCH request to the Airtable API. + See :meth:`~Api.request` for keyword arguments. + """ + return self.request("PATCH", url, **kwargs) + + def delete(self, url: str, **kwargs: Any) -> Any: + """ + Make a DELETE request to the Airtable API. + See :meth:`~Api.request` for keyword arguments. + """ + return self.request("DELETE", url, **kwargs) + def _process_response(self, response: requests.Response) -> Any: try: response.raise_for_status() except requests.exceptions.HTTPError as exc: - err_msg = str(exc) - # Attempt to get Error message from response, Issue #16 try: error_dict = response.json() @@ -178,8 +333,7 @@ def _process_response(self, response: requests.Response) -> Any: pass else: if "error" in error_dict: - err_msg += " [Error: {}]".format(error_dict["error"]) - exc.args = (*exc.args, err_msg) + exc.args = (*exc.args, repr(error_dict["error"])) raise exc # Some Airtable endpoints will respond with an empty body and a 200. @@ -193,10 +347,11 @@ def iterate_requests( url: str, fallback: Optional[Tuple[str, str]] = None, options: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, offset_field: str = "offset", ) -> Iterator[Any]: """ - Makes one or more requests and iterates through each result. + Make one or more requests and iterates through each result. If the response payload contains an 'offset' value, this method will perform another request with that offset value as a parameter (query params for GET, @@ -211,30 +366,46 @@ def iterate_requests( fallback: The method and URL to use if we have to convert a GET to a POST. options: Airtable-specific query params to use while fetching records. See :ref:`Parameters` for valid options. + params: Additional query params to append to the URL as-is. 
offset_field: The key to use in the API response to determine whether there are additional pages to retrieve. """ options = options or {} + params = params or {} + + def _get_offset_field(response: Dict[str, Any]) -> Optional[str]: + value = response.get("pagination") or response # see Enterprise.audit_log + field_names = offset_field.split(".") + while field_names: + if not (value := value.get(field_names.pop(0))): + return None + return str(value) + while True: - response = self.request(method, url, fallback=fallback, options=options) + response = self.request( + method=method, + url=url, + fallback=fallback, + options=options, + params=params, + ) yield response if not isinstance(response, dict): return - if not (offset := response.get(offset_field)): + if not (offset := _get_offset_field(response)): return options = {**options, offset_field: offset} def chunked(self, iterable: Sequence[T]) -> Iterator[Sequence[T]]: """ - Iterates through chunks of the given sequence that are equal in size + Iterate through chunks of the given sequence that are equal in size to the maximum number of records per request allowed by the API. """ return chunked(iterable, self.MAX_RECORDS_PER_REQUEST) - def whoami(self) -> UserAndScopesDict: + @enterprise_only + def enterprise(self, enterprise_account_id: str) -> Enterprise: """ - Return the current user ID and (if connected via OAuth) the list of scopes. - See `Get user ID & scopes `_ for more information. + Build an object representing an enterprise account. 
""" - data = self.request("GET", self.build_url("meta/whoami")) - return assert_typed_dict(UserAndScopesDict, data) + return Enterprise(self, enterprise_account_id) diff --git a/pyairtable/api/base.py b/pyairtable/api/base.py index 43dc3951..2efd8ef2 100644 --- a/pyairtable/api/base.py +++ b/pyairtable/api/base.py @@ -1,29 +1,80 @@ import warnings -from functools import lru_cache -from typing import Any, Dict, List, Union +from functools import cached_property +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union -import pyairtable.api.api import pyairtable.api.table +from pyairtable.models.schema import BaseCollaborators, BaseSchema, BaseShares from pyairtable.models.webhook import ( CreateWebhook, CreateWebhookResponse, Webhook, WebhookSpecification, ) +from pyairtable.utils import Url, UrlBuilder, cache_unless_forced, enterprise_only + +if TYPE_CHECKING: + from pyairtable.api.api import Api class Base: """ Represents an Airtable base. + + Usage: + >>> base = api.base("appNxslc6jG0XedVM") + >>> table = base.table("Table Name") + >>> records = table.all() """ #: The connection to the Airtable API. - api: "pyairtable.api.api.Api" + api: "Api" #: The base ID, in the format ``appXXXXXXXXXXXXXX`` id: str - def __init__(self, api: Union["pyairtable.api.api.Api", str], base_id: str): + #: The permission level the current user has on the base + permission_level: Optional[str] + + # Cached metadata to reduce API calls + _collaborators: Optional[BaseCollaborators] = None + _schema: Optional[BaseSchema] = None + _shares: Optional[List[BaseShares.Info]] = None + + class _urls(UrlBuilder): + #: URL for retrieving the base's metadata and collaborators. + meta = Url("meta/bases/{id}") + + #: URL for retrieving information about the base's interfaces. + interfaces = meta / "interfaces" + + #: URL for retrieving the base's shares. + shares = meta / "shares" + + #: URL for retrieving the base's schema. 
+ tables = meta / "tables" + + #: URL for POST requests that modify collaborations on the base. + collaborators = meta / "collaborators" + + #: URL for retrieving or modifying the base's webhooks. + webhooks = Url("bases/{id}/webhooks") + + def interface(self, interface_id: str) -> Url: + """ + URL for retrieving information about a specific interface on the base. + """ + return self.interfaces / interface_id + + urls = cached_property(_urls) + + def __init__( + self, + api: Union["Api", str], + base_id: str, + *, + name: Optional[str] = None, + permission_level: Optional[str] = None, + ): """ Old style constructor takes ``str`` arguments, and will create its own instance of :class:`Api`. @@ -38,7 +89,10 @@ def __init__(self, api: Union["pyairtable.api.api.Api", str], base_id: str): Args: api: An instance of :class:`Api` or an Airtable access token. - base_id: |arg_base_id| + base_id: An Airtable base ID. + name: The name of the Airtable base, if known. + permission_level: The permission level the current authenticated user + has upon the Airtable base, if known. """ if isinstance(api, str): warnings.warn( @@ -47,37 +101,120 @@ def __init__(self, api: Union["pyairtable.api.api.Api", str], base_id: str): category=DeprecationWarning, stacklevel=2, ) - api = pyairtable.api.api.Api(api) + + from pyairtable import Api + + api = Api(api) self.api = api self.id = base_id + self.permission_level = permission_level + self._name = name + + @property + def name(self) -> Optional[str]: + """ + The name of the base, if provided to the constructor + or available in cached base information. 
+ """ + if self._collaborators: + return self._collaborators.name + return self._name def __repr__(self) -> str: - return f"" + repr = f"" - @lru_cache - def table(self, table_name: str) -> "pyairtable.api.table.Table": + def table( + self, + id_or_name: str, + *, + validate: bool = False, + force: bool = False, + ) -> "pyairtable.api.table.Table": """ - Returns a new :class:`Table` instance using all shared - attributes from :class:`Base`. + Build a new :class:`Table` instance using this instance of :class:`Base`. Args: - table_name: An Airtable table name. Table name should be unencoded, - as shown on browser. + id_or_name: |arg_table_id_or_name| + validate: |kwarg_validate_metadata| + force: |kwarg_force_metadata| + + Usage: + >>> base.table('Apartments') + """ - return pyairtable.api.table.Table(None, self, table_name) + if validate: + schema = self.schema(force=force).table(id_or_name) + return pyairtable.api.table.Table(None, self, schema) + return pyairtable.api.table.Table(None, self, id_or_name) - @property - def url(self) -> str: - return self.api.build_url(self.id) + def tables(self, *, force: bool = False) -> List["pyairtable.api.table.Table"]: + """ + Retrieve the base's schema and returns a list of :class:`Table` instances. - @property - def webhooks_url(self) -> str: - return self.api.build_url("bases", self.id, "webhooks") + Args: + force: |kwarg_force_metadata| + + Usage: + >>> base.tables() + [ +
, +
+ ] + """ + return [ + pyairtable.api.table.Table(None, self, table_schema) + for table_schema in self.schema(force=force).tables + ] + + def create_table( + self, + name: str, + fields: Sequence[Dict[str, Any]], + description: Optional[str] = None, + ) -> "pyairtable.api.table.Table": + """ + Create a table in the given base. + + Args: + name: The unique table name. + fields: A list of ``dict`` objects that conform to the + `Airtable field model `__. + description: The table description. Must be no longer than 20k characters. + """ + url = self.urls.tables + payload = {"name": name, "fields": fields} + if description: + payload["description"] = description + response = self.api.post(url, json=payload) + return self.table(response["id"], validate=True, force=True) + + @cache_unless_forced + def schema(self) -> BaseSchema: + """ + Retrieve the schema of all tables in the base and caches it. + + Usage: + >>> base.schema().tables + [TableSchema(...), TableSchema(...), ...] + >>> base.schema().table("tblXXXXXXXXXXXXXX") + TableSchema(id="tblXXXXXXXXXXXXXX", ...) + >>> base.schema().table("My Table") + TableSchema(id="...", name="My Table", ...) + """ + url = self.urls.tables + params = {"include": ["visibleFieldIds"]} + data = self.api.get(url, params=params) + return BaseSchema.from_api(data, self.api, context=self) def webhooks(self) -> List[Webhook]: """ - Retrieves all the base's webhooks from the API + Retrieve all the base's webhooks (see: `List webhooks `_). Usage: @@ -91,24 +228,20 @@ def webhooks(self) -> List[Webhook]: last_successful_notification_time=None, notification_url="https://example.com", last_notification_result=None, - expiration_time="2023-07-01T00:00:00.000Z", + expiration_time=datetime.datetime(...), specification: WebhookSpecification(...) 
) ] """ - response = self.api.request("GET", self.webhooks_url) + response = self.api.get(self.urls.webhooks) return [ - Webhook.from_api( - api=self.api, - url=f"{self.webhooks_url}/{data['id']}", - obj=data, - ) + Webhook.from_api(data, self.api, context=self) for data in response["webhooks"] ] def webhook(self, webhook_id: str) -> Webhook: """ - Returns a single webhook or raises ``KeyError`` if the given ID is invalid. + Build a single webhook or raises ``KeyError`` if the given ID is invalid. Airtable's API does not permit retrieving a single webhook, so this function will call :meth:`~webhooks` and simply return one item from the list. @@ -124,7 +257,7 @@ def add_webhook( spec: Union[WebhookSpecification, Dict[Any, Any]], ) -> CreateWebhookResponse: """ - Creates a webhook on the base with the given + Create a webhook on the base with the given `webhooks specification `_. The return value will contain a unique secret that must be saved @@ -149,7 +282,7 @@ def add_webhook( CreateWebhookResponse( id='ach00000000000001', mac_secret_base64='c3VwZXIgZHVwZXIgc2VjcmV0', - expiration_time='2023-07-01T00:00:00.000Z' + expiration_time=datetime.datetime(...) ) Raises: @@ -162,9 +295,40 @@ def add_webhook( can also provide :class:`~pyairtable.models.webhook.WebhookSpecification`. 
""" if isinstance(spec, dict): - spec = WebhookSpecification.parse_obj(spec) + spec = WebhookSpecification.from_api(spec, self.api) create = CreateWebhook(notification_url=notify_url, specification=spec) - request = create.dict(by_alias=True, exclude_unset=True) - response = self.api.request("POST", self.webhooks_url, json=request) - return CreateWebhookResponse.parse_obj(response) + request = create.model_dump(by_alias=True, exclude_unset=True) + response = self.api.post(self.urls.webhooks, json=request) + return CreateWebhookResponse.from_api(response, self.api) + + @enterprise_only + @cache_unless_forced + def collaborators(self) -> "BaseCollaborators": + """ + Retrieve `base collaborators `__. + """ + params = {"include": ["collaborators", "inviteLinks", "interfaces"]} + data = self.api.get(self.urls.meta, params=params) + return BaseCollaborators.from_api(data, self.api, context=self) + + @enterprise_only + @cache_unless_forced + def shares(self) -> List[BaseShares.Info]: + """ + Retrieve `base shares `__. + """ + data = self.api.get(self.urls.shares) + shares_obj = BaseShares.from_api(data, self.api, context=self) + return shares_obj.shares + + @enterprise_only + def delete(self) -> None: + """ + Delete the base. 
+ + Usage: + >>> base = api.base("appMxESAta6clCCwF") + >>> base.delete() + """ + self.api.delete(self.urls.meta) diff --git a/pyairtable/api/enterprise.py b/pyairtable/api/enterprise.py new file mode 100644 index 00000000..90213bbd --- /dev/null +++ b/pyairtable/api/enterprise.py @@ -0,0 +1,655 @@ +from datetime import date, datetime +from functools import cached_property, partialmethod +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + Iterator, + List, + Literal, + Optional, + Union, +) + +import pydantic +from typing_extensions import Self + +from pyairtable.models._base import AirtableModel, rebuild_models +from pyairtable.models.audit import AuditLogResponse +from pyairtable.models.schema import EnterpriseInfo, NestedId, UserGroup, UserInfo +from pyairtable.utils import ( + Url, + UrlBuilder, + cache_unless_forced, + coerce_iso_str, + coerce_list_str, + enterprise_only, +) + +if TYPE_CHECKING: + from pyairtable.api.api import Api + from pyairtable.api.workspace import Workspace + + +@enterprise_only +class Enterprise: + """ + Represents an Airtable enterprise account. + + >>> enterprise = api.enterprise("entUBq2RGdihxl3vU") + >>> enterprise.info().workspace_ids + ['wspmhESAta6clCCwF', ...] + """ + + class _urls(UrlBuilder): + #: URL for retrieving basic information about the enterprise. + meta = Url("meta/enterpriseAccounts/{id}") + + #: URL for retrieving information about all users. + users = meta / "users" + + #: URL for retrieving information about all user groups. + groups = Url("meta/groups") + + #: URL for claiming a user into an enterprise. + claim_users = meta / "claim/users" + + #: URL for retrieving audit log events. + audit_log = meta / "auditLogEvents" + + #: URL for managing descendant enterprise accounts. + descendants = meta / "descendants" + + #: URL for moving user groups between enterprise accounts. + move_groups = meta / "moveGroups" + + #: URL for moving workspaces between enterprise accounts. 
+ move_workspaces = meta / "moveWorkspaces" + + #: URL for creating a new workspace. + create_workspace = Url("meta/workspaces") + + def user(self, user_id: str) -> Url: + """ + URL for retrieving information about a single user. + """ + return self.users / user_id + + def group(self, group_id: str) -> Url: + """ + URL for retrieving information about a single user group. + """ + return self.groups / group_id + + def admin_access(self, action: Literal["grant", "revoke"]) -> Url: + """ + URL for granting or revoking admin access to one or more users. + """ + return self.meta / f"users/{action}AdminAccess" + + def remove_user(self, user_id: str) -> Url: + """ + URL for removing a user from the enterprise. + """ + return self.user(user_id) / "remove" + + #: URL for granting admin access to one or more users. + grant_admin = partialmethod(admin_access, "grant") + + #: URL for revoking admin access from one or more users. + revoke_admin = partialmethod(admin_access, "revoke") + + urls = cached_property(_urls) + + def __init__(self, api: "Api", workspace_id: str): + self.api = api + self.id = workspace_id + self._info: Optional[EnterpriseInfo] = None + + @cache_unless_forced + def info( + self, + *, + aggregated: bool = False, + descendants: bool = False, + ) -> EnterpriseInfo: + """ + Retrieve basic information about the enterprise, caching the result. + Calls `Get enterprise `__. + + Args: + aggregated: if ``True``, include aggregated values across the enterprise. + descendants: if ``True``, include information about the enterprise's descendant orgs. + """ + include = [] + if aggregated: + include.append("aggregated") + if descendants: + include.append("descendants") + params = {"include": include} + response = self.api.get(self.urls.meta, params=params) + return EnterpriseInfo.from_api(response, self.api) + + def group(self, group_id: str, collaborations: bool = True) -> UserGroup: + """ + Retrieve information on a single user group with the given ID. 
+ + Args: + group_id: A user group ID (``grpQBq2RGdihxl3vU``). + collaborations: If ``False``, no collaboration data will be requested + from Airtable. This may result in faster responses. + """ + params = {"include": ["collaborations"] if collaborations else []} + payload = self.api.get(self.urls.group(group_id), params=params) + return UserGroup.model_validate(payload) + + def user( + self, + id_or_email: str, + *, + collaborations: bool = True, + aggregated: bool = False, + descendants: bool = False, + ) -> UserInfo: + """ + Retrieve information on a single user with the given ID or email. + + Args: + id_or_email: A user ID (``usrQBq2RGdihxl3vU``) or email address. + collaborations: If ``False``, no collaboration data will be requested + from Airtable. This may result in faster responses. + aggregated: If ``True``, includes the user's aggregated values + across this enterprise account and its descendants. + descendants: If ``True``, includes information about the user + in a ``dict`` keyed per descendant enterprise account. + """ + users = self.users( + [id_or_email], + collaborations=collaborations, + aggregated=aggregated, + descendants=descendants, + ) + return users[0] + + def users( + self, + ids_or_emails: Iterable[str], + *, + collaborations: bool = True, + aggregated: bool = False, + descendants: bool = False, + ) -> List[UserInfo]: + """ + Retrieve information on the users with the given IDs or emails. + + Read more at `Get users by ID or email `__. + + Args: + ids_or_emails: A sequence of user IDs (``usrQBq2RGdihxl3vU``) + or email addresses (or both). + collaborations: If ``False``, no collaboration data will be requested + from Airtable. This may result in faster responses. + aggregated: If ``True``, includes the user's aggregated values + across this enterprise account and its descendants. + descendants: If ``True``, includes information about the user + in a ``dict`` keyed per descendant enterprise account. 
+ """ + user_ids: List[str] = [] + emails: List[str] = [] + for value in ids_or_emails: + (emails if "@" in value else user_ids).append(value) + + include = [] + if collaborations: + include.append("collaborations") + if aggregated: + include.append("aggregated") + if descendants: + include.append("descendants") + + response = self.api.get( + url=self.urls.users, + params={ + "id": user_ids, + "email": emails, + "include": include, + }, + ) + # key by user ID to avoid returning duplicates + users = { + info.id: info + for user_obj in response["users"] + if (info := UserInfo.from_api(user_obj, self.api, context=self)) + } + return list(users.values()) + + def audit_log( + self, + *, + page_size: Optional[int] = None, + page_limit: Optional[int] = None, + sort_asc: Optional[bool] = False, + previous: Optional[str] = None, + next: Optional[str] = None, + start_time: Optional[Union[str, date, datetime]] = None, + end_time: Optional[Union[str, date, datetime]] = None, + user_id: Optional[Union[str, Iterable[str]]] = None, + event_type: Optional[Union[str, Iterable[str]]] = None, + model_id: Optional[Union[str, Iterable[str]]] = None, + category: Optional[Union[str, Iterable[str]]] = None, + ) -> Iterator[AuditLogResponse]: + """ + Retrieve and yield results from the `Audit Log `__, + one page of results at a time. Each result is an instance of :class:`~pyairtable.models.audit.AuditLogResponse` + and contains the pagination IDs returned from the API, as described in the linked documentation. + + By default, the Airtable API will return up to 180 days of audit log events, going backwards from most recent. + Retrieving all records may take some time, but is as straightforward as: + + >>> enterprise = Enterprise("entYourEnterpriseId") + >>> events = [ + ... event + ... for page in enterprise.audit_log() + ... for event in page.events + ... 
] + + If you are creating a record of all audit log events, you probably want to start with the earliest + events in the retention window and iterate chronologically. You'll likely have a job running + periodically in the background, so you'll need some way to persist the pagination IDs retrieved + from the API in case that job is interrupted and needs to be restarted. + + The sample code below will use a local file to remember the next page's ID, so that if the job is + interrupted, it will resume where it left off (potentially processing some entries twice). + + .. code-block:: python + + import os + import shelve + import pyairtable + + def handle_event(event): + print(event) + + api = pyairtable.Api(os.environ["AIRTABLE_API_KEY"]) + enterprise = api.enterprise(os.environ["AIRTABLE_ENTERPRISE_ID"]) + persistence = shelve.open("audit_log.db") + first_page = persistence.get("next", None) + + for page in enterprise.audit_log(sort_asc=True, next=first_page): + for event in page.events: + handle_event(event) + persistence["next"] = page.pagination.next + + For more information on any of the keyword parameters below, refer to the + `audit log events `__ + API documentation. + + Args: + page_size: How many events per page to return (maximum 100). + page_limit: How many pages to return before stopping. + sort_asc: Whether to sort in ascending order (earliest to latest) + rather than descending order (latest to earliest). + previous: Requests the previous page of results from the given ID. + See the `audit log integration guide `__ + for more information on pagination parameters. + next: Requests the next page of results according to the given ID. + See the `audit log integration guide `__ + for more information on pagination parameters. + start_time: Earliest timestamp to retrieve (inclusive). + end_time: Latest timestamp to retrieve (inclusive). + originating_user_id: Retrieve audit log events originating + from the provided user ID or IDs (maximum 100). 
+ event_type: Retrieve audit log events falling under the provided + `audit log event type `__ + or types (maximum 100). + model_id: Retrieve audit log events taking action on, or involving, + the provided model ID or IDs (maximum 100). + category: Retrieve audit log events belonging to the provided + audit log event category or categories. + + Returns: + An object representing a single page of audit log results. + """ + + start_time = coerce_iso_str(start_time) + end_time = coerce_iso_str(end_time) + user_id = coerce_list_str(user_id) + event_type = coerce_list_str(event_type) + model_id = coerce_list_str(model_id) + category = coerce_list_str(category) + params = { + "startTime": start_time, + "endTime": end_time, + "originatingUserId": user_id, + "eventType": event_type, + "modelId": model_id, + "category": category, + "pageSize": page_size, + "sortOrder": ("ascending" if sort_asc else "descending"), + "previous": previous, + "next": next, + } + params = {k: v for (k, v) in params.items() if v} + offset_field = "next" if sort_asc else "previous" + iter_requests = self.api.iterate_requests( + method="GET", + url=self.urls.audit_log, + params=params, + offset_field=offset_field, + ) + for count, response in enumerate(iter_requests, start=1): + parsed = AuditLogResponse.model_validate(response) + yield parsed + if not parsed.events: + return + if page_limit is not None and count >= page_limit: + return + + def remove_user( + self, + user_id: str, + replacement: Optional[str] = None, + *, + descendants: bool = False, + ) -> "UserRemoved": + """ + Unshare a user from all enterprise workspaces, bases, and interfaces. + If applicable, the user will also be removed from as an enterprise admin. + + See `Remove user from enterprise `__ + for more information. + + Args: + user_id: The user ID. + replacement: If the user is the sole owner of any workspaces, you must + specify a replacement user ID to be added as the new owner of such + workspaces. 
If the user is not the sole owner of any workspaces, + this is optional and will be ignored if provided. + descendants: If ``True``, removes the user from descendant enterprise accounts. + """ + url = self.urls.remove_user(user_id) + payload: Dict[str, Any] = {"isDryRun": False} + if replacement: + payload["replacementOwnerId"] = replacement + if descendants: + payload["removeFromDescendants"] = True + response = self.api.post(url, json=payload) + return UserRemoved.from_api(response, self.api, context=self) + + def claim_users( + self, users: Dict[str, Literal["managed", "unmanaged"]] + ) -> "ManageUsersResponse": + """ + Batch manage organizations enterprise account users. This endpoint allows you + to change a user's membership status from being unmanaged to being an + organization member, and vice versa. + + See `Manage user membership `__ + for more information. + + Args: + users: A ``dict`` mapping user IDs or emails to the desired state, + either ``"managed"`` or ``"unmanaged"``. + """ + payload = { + "users": [ + { + ("email" if "@" in key else "id"): key, + "state": value, + } + for (key, value) in users.items() + ] + } + response = self.api.post(self.urls.claim_users, json=payload) + return ManageUsersResponse.from_api(response, self.api, context=self) + + def delete_users(self, emails: Iterable[str]) -> "DeleteUsersResponse": + """ + Delete multiple users by email. + + Args: + emails: A list or other iterable of email addresses. + """ + response = self.api.delete(self.urls.users, params={"email": list(emails)}) + return DeleteUsersResponse.from_api(response, self.api, context=self) + + def grant_admin(self, *users: Union[str, UserInfo]) -> "ManageUsersResponse": + """ + Grant admin access to one or more users. + + Args: + users: One or more user IDs, email addresses, or instances of + :class:`~pyairtable.models.schema.UserInfo`. 
+ """ + return self._post_admin_access("grant", users) + + def revoke_admin(self, *users: Union[str, UserInfo]) -> "ManageUsersResponse": + """ + Revoke admin access to one or more users. + + Args: + users: One or more user IDs, email addresses, or instances of + :class:`~pyairtable.models.schema.UserInfo`. + """ + return self._post_admin_access("revoke", users) + + def _post_admin_access( + self, action: Literal["grant", "revoke"], users: Iterable[Union[str, UserInfo]] + ) -> "ManageUsersResponse": + response = self.api.post( + self.urls.admin_access(action), + json={ + "users": [ + {"email": user_id} if "@" in user_id else {"id": user_id} + for user in users + for user_id in [user.id if isinstance(user, UserInfo) else user] + ] + }, + ) + return ManageUsersResponse.from_api(response, self.api, context=self) + + def create_descendant(self, name: str) -> Self: + """ + Creates a descendant enterprise account of the enterprise account. + Descendant enterprise accounts can only be created for root enterprise accounts + with the Enterprise Hub feature enabled. + + See `Create descendant enterprise `__. + + Args: + name: The name to give the new account. + """ + response = self.api.post(self.urls.descendants, json={"name": name}) + return self.__class__(self.api, response["id"]) + + def move_groups( + self, + group_ids: Iterable[str], + target: Union[str, Self], + ) -> "MoveGroupsResponse": + """ + Move one or more user groups from the current enterprise account + into a different enterprise account within the same organization. + + See `Move user groups `__. + + Args: + group_ids: User group IDs. + target: The ID of the target enterprise, or an instance of :class:`~pyairtable.Enterprise`. 
+ """ + if isinstance(target, Enterprise): + target = target.id + response = self.api.post( + self.urls.move_groups, + json={ + "groupIds": group_ids, + "targetEnterpriseAccountId": target, + }, + ) + return MoveGroupsResponse.from_api(response, self.api, context=self) + + def move_workspaces( + self, + workspace_ids: Iterable[str], + target: Union[str, Self], + ) -> "MoveWorkspacesResponse": + """ + Move one or more workspaces from the current enterprise account + into a different enterprise account within the same organization. + + See `Move workspaces `__. + + Args: + workspace_ids: The list of workspace IDs. + target: The ID of the target enterprise, or an instance of :class:`~pyairtable.Enterprise`. + """ + if isinstance(target, Enterprise): + target = target.id + response = self.api.post( + self.urls.move_workspaces, + json={ + "workspaceIds": workspace_ids, + "targetEnterpriseAccountId": target, + }, + ) + return MoveWorkspacesResponse.from_api(response, self.api, context=self) + + def create_workspace(self, name: str) -> "Workspace": + """ + Creates a new workspace with the provided name within the enterprise account + and returns the workspace ID. The requesting user must be an active effective + admin of the enterprise account; the created workspace's owner will be the user + who makes the request. + + See `Create workspace `__. + + Args: + name: The name of the workspace to be created. + + Returns: + The ID of the newly created workspace. + """ + response = self.api.post( + self.urls.create_workspace, + json={ + "enterpriseAccountId": self.id, + "name": name, + }, + ) + return self.api.workspace(str(response["id"])) + + +class UserRemoved(AirtableModel): + """ + Returned from the `Remove user from enterprise `__ + endpoint. 
+ """ + + was_user_removed_as_admin: bool + shared: "UserRemoved.Shared" + unshared: "UserRemoved.Unshared" + + class Shared(AirtableModel): + workspaces: List["UserRemoved.Shared.Workspace"] + + class Workspace(AirtableModel): + permission_level: str + workspace_id: str + workspace_name: str + user_id: str = "" + deleted_time: Optional[datetime] = None + enterprise_account_id: Optional[str] = None + + class Unshared(AirtableModel): + bases: List["UserRemoved.Unshared.Base"] + interfaces: List["UserRemoved.Unshared.Interface"] + workspaces: List["UserRemoved.Unshared.Workspace"] + + class Base(AirtableModel): + user_id: str + base_id: str + base_name: str + former_permission_level: str + deleted_time: Optional[datetime] = None + enterprise_account_id: Optional[str] = None + + class Interface(AirtableModel): + user_id: str + base_id: str + interface_id: str + interface_name: str + former_permission_level: str + deleted_time: Optional[datetime] = None + enterprise_account_id: Optional[str] = None + + class Workspace(AirtableModel): + user_id: str + former_permission_level: str + workspace_id: str + workspace_name: str + deleted_time: Optional[datetime] = None + enterprise_account_id: Optional[str] = None + + +class DeleteUsersResponse(AirtableModel): + """ + Returned from the `Delete users by email `__ + endpoint. + """ + + deleted_users: List["DeleteUsersResponse.UserInfo"] + errors: List["DeleteUsersResponse.Error"] + + class UserInfo(AirtableModel): + id: str + email: str + + class Error(AirtableModel): + type: str + email: str + message: Optional[str] = None + + +class ManageUsersResponse(AirtableModel): + """ + Returned from the `Manage user membership `__, + `Grant admin access `__, and + `Revoke admin access `__ + endpoints. 
+ """ + + errors: List["ManageUsersResponse.Error"] = pydantic.Field(default_factory=list) + + class Error(AirtableModel): + id: Optional[str] = None + email: Optional[str] = None + type: str + message: str + + +class MoveError(AirtableModel): + id: str + type: str + message: str + + +class MoveGroupsResponse(AirtableModel): + """ + Returned by `Move user groups `__. + """ + + moved_groups: List[NestedId] = pydantic.Field(default_factory=list) + errors: List[MoveError] = pydantic.Field(default_factory=list) + + +class MoveWorkspacesResponse(AirtableModel): + """ + Returned by `Move workspaces `__. + """ + + moved_workspaces: List[NestedId] = pydantic.Field(default_factory=list) + errors: List[MoveError] = pydantic.Field(default_factory=list) + + +rebuild_models(vars()) diff --git a/pyairtable/api/params.py b/pyairtable/api/params.py index 55d6464e..0d26bdfd 100644 --- a/pyairtable/api/params.py +++ b/pyairtable/api/params.py @@ -1,10 +1,6 @@ from typing import Any, Dict, List, Tuple - -class InvalidParamException(ValueError): - """ - Raised when invalid parameters are passed to ``all()``, ``first()``, etc. 
- """ +from pyairtable.exceptions import InvalidParameterError def dict_list_to_request_params( @@ -12,7 +8,7 @@ def dict_list_to_request_params( values: List[Dict[str, str]], ) -> Dict[str, str]: """ - Returns dict to be used by request params from dict list + Build the dict to be used by request params from dict list Expected Airtable Url Params is: `?sort[0][field]=FieldOne&sort[0][direction]=asc` @@ -70,7 +66,7 @@ def field_names_to_sorting_dict(field_names: List[str]) -> List[Dict[str, str]]: "max_records": "maxRecords", "offset": "offset", "page_size": "pageSize", - "return_fields_by_field_id": "returnFieldsByFieldId", + "use_field_ids": "returnFieldsByFieldId", "sort": "sort", "time_zone": "timeZone", "user_locale": "userLocale", @@ -78,6 +74,9 @@ def field_names_to_sorting_dict(field_names: List[str]) -> List[Dict[str, str]]: # get webhook payloads "limit": "limit", "cursor": "cursor", + # get audit log events + "next": "next", + "previous": "previous", } @@ -85,17 +84,31 @@ def _option_to_param(name: str) -> str: try: return OPTIONS_TO_PARAMETERS[name] except KeyError: - raise InvalidParamException(name) + raise InvalidParameterError(name) #: List of option names that cannot be passed via POST, only GET #: See https://github.com/gtalarico/pyairtable/pull/210#discussion_r1046014885 OPTIONS_NOT_SUPPORTED_VIA_POST = ("user_locale", "time_zone") +#: Mapping of option names to their recordMetadata values +#: These options are converted to the recordMetadata array parameter +OPTIONS_TO_RECORD_METADATA = { + "count_comments": "commentCount", +} + + +def _build_record_metadata(options: Dict[str, Any]) -> List[str]: + return [ + metadata_value + for option_name, metadata_value in OPTIONS_TO_RECORD_METADATA.items() + if options.get(option_name) + ] + def options_to_params(options: Dict[str, Any]) -> Dict[str, Any]: """ - Converts Airtable options to a dict of query params. + Convert Airtable options to a dict of query params. 
Args: options: A dict of Airtable-specific options. See :ref:`parameters`. @@ -103,7 +116,11 @@ def options_to_params(options: Dict[str, Any]) -> Dict[str, Any]: Returns: A dict of query parameters that can be passed to the ``requests`` library. """ - params = {_option_to_param(name): value for (name, value) in options.items()} + params = { + _option_to_param(name): value + for (name, value) in options.items() + if name not in OPTIONS_TO_RECORD_METADATA + } if "fields" in params: params["fields[]"] = params.pop("fields") @@ -112,15 +129,17 @@ def options_to_params(options: Dict[str, Any]) -> Dict[str, Any]: if "sort" in params: sorting_dict_list = field_names_to_sorting_dict(params.pop("sort")) params.update(dict_list_to_request_params("sort", sorting_dict_list)) + if record_metadata := _build_record_metadata(options): + params["recordMetadata[]"] = record_metadata return params def options_to_json_and_params( - options: Dict[str, Any] + options: Dict[str, Any], ) -> Tuple[Dict[str, Any], Dict[str, Any]]: """ - Converts Airtable options to a JSON payload with (possibly) leftover query params. + Convert Airtable options to a JSON payload with (possibly) leftover query params. Args: options: A dict of Airtable-specific options. See :ref:`parameters`. 
@@ -132,6 +151,7 @@ def options_to_json_and_params( _option_to_param(name): value for (name, value) in options.items() if name not in OPTIONS_NOT_SUPPORTED_VIA_POST + and name not in OPTIONS_TO_RECORD_METADATA } params = { _option_to_param(name): value @@ -143,5 +163,7 @@ def options_to_json_and_params( json["returnFieldsByFieldId"] = bool(json["returnFieldsByFieldId"]) if "sort" in json: json["sort"] = field_names_to_sorting_dict(json.pop("sort")) + if record_metadata := _build_record_metadata(options): + json["recordMetadata"] = record_metadata return (json, params) diff --git a/pyairtable/api/retrying.py b/pyairtable/api/retrying.py index 43f78717..893714bb 100644 --- a/pyairtable/api/retrying.py +++ b/pyairtable/api/retrying.py @@ -18,7 +18,7 @@ def retry_strategy( **kwargs: Any, ) -> Retry: """ - Creates a `Retry `_ + Create a `Retry `_ instance with adjustable default values. :class:`~pyairtable.Api` accepts this via the ``retry_strategy=`` parameter. @@ -74,5 +74,5 @@ def __init__(self, retry_strategy: Retry): __all__ = [ "Retry", - "_RetryingSession", + "retry_strategy", ] diff --git a/pyairtable/api/table.py b/pyairtable/api/table.py index b5cb717f..b84a0fe9 100644 --- a/pyairtable/api/table.py +++ b/pyairtable/api/table.py @@ -1,21 +1,43 @@ -import posixpath +import base64 +import mimetypes +import os import urllib.parse import warnings -from typing import Any, Iterator, List, Optional, Union, overload +from functools import cached_property +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + Iterator, + List, + Optional, + Union, + overload, +) import pyairtable.models -from pyairtable.api.retrying import Retry from pyairtable.api.types import ( FieldName, RecordDeletedDict, RecordDict, RecordId, UpdateRecordDict, + UploadAttachmentResultDict, UpsertResultDict, WritableFields, assert_typed_dict, assert_typed_dicts, ) +from pyairtable.formulas import Formula, to_formula_str +from pyairtable.models.schema import 
FieldSchema, TableSchema, parse_field_schema +from pyairtable.utils import Url, UrlBuilder, is_table_id + +if TYPE_CHECKING: + from pyairtable.api.api import Api, TimeoutTuple + from pyairtable.api.base import Base + from pyairtable.api.retrying import Retry class Table: @@ -29,11 +51,44 @@ class Table: """ #: The base that this table belongs to. - base: "pyairtable.api.base.Base" + base: "Base" #: Can be either the table name or the table ID (``tblXXXXXXXXXXXXXX``). name: str + # Cached schema information to reduce API calls + _schema: Optional[TableSchema] = None + + class _urls(UrlBuilder): + #: URL for retrieving all records in the table + records = Url("{base.id}/{self.id_or_name}") + + #: URL for retrieving all records in the table via POST, + #: when the request is too large to fit into GET parameters. + records_post = records / "listRecords" + fields = Url("meta/bases/{base.id}/tables/{self.id_or_name}/fields") + + def record(self, record_id: RecordId) -> Url: + """ + URL for a specific record in the table. + """ + return self.records / record_id + + def record_comments(self, record_id: RecordId) -> Url: + """ + URL for comments on a specific record in the table. + """ + return self.record(record_id) / "comments" + + def upload_attachment(self, record_id: RecordId, field: str) -> Url: + """ + URL for uploading an attachment to a specific field in a specific record. + """ + url = self.build_url(f"{{base.id}}/{record_id}/{field}/uploadAttachment") + return url.replace_url(netloc="content.airtable.com") + + urls = cached_property(_urls) + @overload def __init__( self, @@ -41,26 +96,32 @@ def __init__( base_id: str, table_name: str, *, - timeout: Optional["pyairtable.api.api.TimeoutTuple"] = None, - retry_strategy: Optional[Retry] = None, + timeout: Optional["TimeoutTuple"] = None, + retry_strategy: Optional["Retry"] = None, endpoint_url: str = "https://api.airtable.com", - ): - ... + ): ... 
@overload def __init__( self, api_key: None, - base_id: "pyairtable.api.base.Base", + base_id: "Base", table_name: str, - ): - ... + ): ... + + @overload + def __init__( + self, + api_key: None, + base_id: "Base", + table_name: TableSchema, + ): ... def __init__( self, api_key: Union[None, str], - base_id: Union["pyairtable.api.base.Base", str], - table_name: str, + base_id: Union["Base", str], + table_name: Union[str, TableSchema], **kwargs: Any, ): """ @@ -91,44 +152,87 @@ def __init__( stacklevel=2, ) api = pyairtable.api.api.Api(api_key, **kwargs) - base = api.base(base_id) - elif api_key is None and isinstance(base_id, pyairtable.api.base.Base): - base = base_id + self.base = api.base(base_id) + elif api_key is None and isinstance(base := base_id, pyairtable.api.base.Base): + self.base = base else: raise TypeError( - "Table() expects either (str, str, str) or (None, Base, str);" + "Table() expects (None, Base, str | TableSchema);" f" got ({type(api_key)}, {type(base_id)}, {type(table_name)})" ) - self.base = base - self.name = table_name + if isinstance(table_name, str): + self.name = table_name + elif isinstance(schema := table_name, TableSchema): + self._schema = schema + self.name = schema.name + else: + raise TypeError( + "Table() expects (None, Base, str | TableSchema);" + f" got ({type(api_key)}, {type(base_id)}, {type(table_name)})" + ) def __repr__(self) -> str: - return f"
" + if self._schema: + return f"
" + return f"
" @property - def url(self) -> str: + def id(self) -> str: """ - Returns the URL for this table. + Get the table's Airtable ID. + + If the instance was created with a name rather than an ID, this property will perform + an API request to retrieve the base's schema. For example: + + .. code-block:: python + + # This will not create any network traffic + >>> table = base.table('tbl00000000000123') + >>> table.id + 'tbl00000000000123' + + # This will fetch schema for the base when `table.id` is called + >>> table = base.table('Table Name') + >>> table.id + 'tbl00000000000123' """ - return self.api.build_url(self.base.id, urllib.parse.quote(self.name, safe="")) + if is_table_id(self.name): + return self.name + return self.schema().id - def record_url(self, record_id: RecordId, *components: str) -> str: + @property + def id_or_name(self, quoted: bool = True) -> str: """ - Returns the URL for the given record ID, with optional trailing components. + Return the table ID if it is known, otherwise the table name used for the constructor. + This is the URL component used to identify the table in Airtable's API. + + Args: + quoted: Whether to return a URL-encoded value. + + Usage: + + >>> table = base.table("Apartments") + >>> table.id_or_name + 'Apartments' + >>> table.schema() + >>> table.id_or_name + 'tblXXXXXXXXXXXXXX' """ - return posixpath.join(self.url, record_id, *components) + value = self._schema.id if self._schema else self.name + value = value if not quoted else urllib.parse.quote(value, safe="") + return value @property - def api(self) -> "pyairtable.api.api.Api": + def api(self) -> "Api": """ - Returns the same API connection as table's :class:`~pyairtable.Base`. + The API connection used by the table's :class:`~pyairtable.Base`. """ return self.base.api def get(self, record_id: RecordId, **options: Any) -> RecordDict: """ - Retrieves a record by its ID. + Retrieve a record by its ID. 
>>> table.get('recwPQIfs4wKPyc9D') {'id': 'recwPQIfs4wKPyc9D', 'fields': {'First Name': 'John', 'Age': 21}} @@ -140,14 +244,16 @@ def get(self, record_id: RecordId, **options: Any) -> RecordDict: cell_format: |kwarg_cell_format| time_zone: |kwarg_time_zone| user_locale: |kwarg_user_locale| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| """ - record = self.api.request("get", self.record_url(record_id), options=options) + if self.api.use_field_ids: + options.setdefault("use_field_ids", self.api.use_field_ids) + record = self.api.get(self.urls.record(record_id), options=options) return assert_typed_dict(RecordDict, record) def iterate(self, **options: Any) -> Iterator[List[RecordDict]]: """ - Iterates through each page of results from `List records `_. + Iterate through each page of results from `List records `_. To get all records at once, use :meth:`all`. >>> it = table.iterate() @@ -171,19 +277,24 @@ def iterate(self, **options: Any) -> Iterator[List[RecordDict]]: cell_format: |kwarg_cell_format| user_locale: |kwarg_user_locale| time_zone: |kwarg_time_zone| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| + count_comments: |kwarg_count_comments| """ + if isinstance(formula := options.get("formula"), Formula): + options["formula"] = to_formula_str(formula) + if self.api.use_field_ids: + options.setdefault("use_field_ids", self.api.use_field_ids) for page in self.api.iterate_requests( method="get", - url=self.url, - fallback=("post", f"{self.url}/listRecords"), + url=self.urls.records, + fallback=("post", self.urls.records_post), options=options, ): yield assert_typed_dicts(RecordDict, page.get("records", [])) def all(self, **options: Any) -> List[RecordDict]: """ - Retrieves all matching records in a single list. + Retrieve all matching records in a single list. 
>>> table = api.table('base_id', 'table_name') >>> table.all(view='MyView', fields=['ColA', '-ColB']) @@ -201,13 +312,14 @@ def all(self, **options: Any) -> List[RecordDict]: cell_format: |kwarg_cell_format| user_locale: |kwarg_user_locale| time_zone: |kwarg_time_zone| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| + count_comments: |kwarg_count_comments| """ return [record for page in self.iterate(**options) for record in page] def first(self, **options: Any) -> Optional[RecordDict]: """ - Retrieves the first matching record. + Retrieve the first matching record. Returns ``None`` if no records are returned. This is similar to :meth:`~pyairtable.Table.all`, except @@ -221,7 +333,8 @@ def first(self, **options: Any) -> Optional[RecordDict]: cell_format: |kwarg_cell_format| user_locale: |kwarg_user_locale| time_zone: |kwarg_time_zone| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| + count_comments: |kwarg_count_comments| """ options.update(dict(page_size=1, max_records=1)) for page in self.iterate(**options): @@ -233,10 +346,10 @@ def create( self, fields: WritableFields, typecast: bool = False, - return_fields_by_field_id: bool = False, + use_field_ids: Optional[bool] = None, ) -> RecordDict: """ - Creates a new record + Create a new record >>> record = {'Name': 'John'} >>> table = api.table('base_id', 'table_name') @@ -245,27 +358,28 @@ def create( Args: fields: Fields to insert. Must be a dict with field names or IDs as keys. 
typecast: |kwarg_typecast| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| """ - created = self.api.request( - method="post", - url=self.url, + if use_field_ids is None: + use_field_ids = self.api.use_field_ids + created = self.api.post( + url=self.urls.records, json={ "fields": fields, "typecast": typecast, - "returnFieldsByFieldId": return_fields_by_field_id, + "returnFieldsByFieldId": use_field_ids, }, ) return assert_typed_dict(RecordDict, created) def batch_create( self, - records: List[WritableFields], + records: Iterable[WritableFields], typecast: bool = False, - return_fields_by_field_id: bool = False, + use_field_ids: Optional[bool] = None, ) -> List[RecordDict]: """ - Creats a number of new records in batches. + Create a number of new records in batches. >>> table.batch_create([{'Name': 'John'}, {'Name': 'Marc'}]) [ @@ -282,21 +396,25 @@ def batch_create( ] Args: - records: List of dicts representing records to be created. + records: Iterable of dicts representing records to be created. typecast: |kwarg_typecast| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| """ inserted_records = [] + if use_field_ids is None: + use_field_ids = self.api.use_field_ids + + # If we got an iterator, exhaust it and collect it into a list. 
+ records = list(records) for chunk in self.api.chunked(records): new_records = [{"fields": fields} for fields in chunk] - response = self.api.request( - method="post", - url=self.url, + response = self.api.post( + url=self.urls.records, json={ "records": new_records, "typecast": typecast, - "returnFieldsByFieldId": return_fields_by_field_id, + "returnFieldsByFieldId": use_field_ids, }, ) inserted_records += assert_typed_dicts(RecordDict, response["records"]) @@ -309,9 +427,10 @@ def update( fields: WritableFields, replace: bool = False, typecast: bool = False, + use_field_ids: Optional[bool] = None, ) -> RecordDict: """ - Updates a particular record ID with the given fields. + Update a particular record ID with the given fields. >>> table.update('recwPQIfs4wKPyc9D', {"Age": 21}) {'id': 'recwPQIfs4wKPyc9D', 'fields': {'First Name': 'John', 'Age': 21}} @@ -323,46 +442,58 @@ def update( fields: Fields to update. Must be a dict with column names or IDs as keys. replace: |kwarg_replace| typecast: |kwarg_typecast| + use_field_ids: |kwarg_use_field_ids| """ + if use_field_ids is None: + use_field_ids = self.api.use_field_ids method = "put" if replace else "patch" updated = self.api.request( method=method, - url=self.record_url(record_id), - json={"fields": fields, "typecast": typecast}, + url=self.urls.record(record_id), + json={ + "fields": fields, + "typecast": typecast, + "returnFieldsByFieldId": use_field_ids, + }, ) return assert_typed_dict(RecordDict, updated) def batch_update( self, - records: List[UpdateRecordDict], + records: Iterable[UpdateRecordDict], replace: bool = False, typecast: bool = False, - return_fields_by_field_id: bool = False, + use_field_ids: Optional[bool] = None, ) -> List[RecordDict]: """ - Updates several records in batches. + Update several records in batches. Args: records: Records to update. 
replace: |kwarg_replace| typecast: |kwarg_typecast| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| Returns: The list of updated records. """ updated_records = [] method = "put" if replace else "patch" + if use_field_ids is None: + use_field_ids = self.api.use_field_ids + + # If we got an iterator, exhaust it and collect it into a list. + records = list(records) for chunk in self.api.chunked(records): chunk_records = [{"id": x["id"], "fields": x["fields"]} for x in chunk] response = self.api.request( method=method, - url=self.url, + url=self.urls.records, json={ "records": chunk_records, "typecast": typecast, - "returnFieldsByFieldId": return_fields_by_field_id, + "returnFieldsByFieldId": use_field_ids, }, ) updated_records += assert_typed_dicts(RecordDict, response["records"]) @@ -371,14 +502,14 @@ def batch_update( def batch_upsert( self, - records: List[UpdateRecordDict], + records: Iterable[Dict[str, Any]], key_fields: List[FieldName], replace: bool = False, typecast: bool = False, - return_fields_by_field_id: bool = False, + use_field_ids: Optional[bool] = None, ) -> UpsertResultDict: """ - Updates or creates records in batches, either using ``id`` (if given) or using a set of + Update or create records in batches, either using ``id`` (if given) or using a set of fields (``key_fields``) to look for matches. For more information on how this operation behaves, see Airtable's API documentation for `Update multiple records `__. @@ -390,11 +521,17 @@ def batch_upsert( records in the input with existing records on the server. replace: |kwarg_replace| typecast: |kwarg_typecast| - return_fields_by_field_id: |kwarg_return_fields_by_field_id| + use_field_ids: |kwarg_use_field_ids| Returns: Lists of created/updated record IDs, along with the list of all records affected. """ + if use_field_ids is None: + use_field_ids = self.api.use_field_ids + + # If we got an iterator, exhaust it and collect it into a list. 
+ records = list(records) + # The API will reject a request where a record is missing any of fieldsToMergeOn, # but we might not reach that error until we've done several batch operations. # To spare implementers from having to recover from a partially applied upsert, @@ -420,11 +557,11 @@ def batch_upsert( ] response = self.api.request( method=method, - url=self.url, + url=self.urls.records, json={ "records": formatted_records, "typecast": typecast, - "returnFieldsByFieldId": return_fields_by_field_id, + "returnFieldsByFieldId": use_field_ids, "performUpsert": {"fieldsToMergeOn": key_fields}, }, ) @@ -438,7 +575,7 @@ def batch_upsert( def delete(self, record_id: RecordId) -> RecordDeletedDict: """ - Deletes the given record. + Delete the given record. >>> table.delete('recwPQIfs4wKPyc9D') {'id': 'recwPQIfs4wKPyc9D', 'deleted': True} @@ -451,12 +588,12 @@ def delete(self, record_id: RecordId) -> RecordDeletedDict: """ return assert_typed_dict( RecordDeletedDict, - self.api.request("delete", self.record_url(record_id)), + self.api.delete(self.urls.record(record_id)), ) - def batch_delete(self, record_ids: List[RecordId]) -> List[RecordDeletedDict]: + def batch_delete(self, record_ids: Iterable[RecordId]) -> List[RecordDeletedDict]: """ - Deletes the given records, operating in batches. + Delete the given records, operating in batches. >>> table.batch_delete(['recwPQIfs4wKPyc9D', 'recwDxIfs3wDPyc3F']) [ @@ -472,15 +609,18 @@ def batch_delete(self, record_ids: List[RecordId]) -> List[RecordDeletedDict]: """ deleted_records = [] + # If we got an iterator, exhaust it and collect it into a list. 
+ record_ids = list(record_ids) + for chunk in self.api.chunked(record_ids): - result = self.api.request("delete", self.url, params={"records[]": chunk}) + result = self.api.delete(self.urls.records, params={"records[]": chunk}) deleted_records += assert_typed_dicts(RecordDeletedDict, result["records"]) return deleted_records def comments(self, record_id: RecordId) -> List["pyairtable.models.Comment"]: """ - Returns a list of comments on the given record. + Retrieve all comments on the given record. Usage: >>> table = Api.table("appNxslc6jG0XedVM", "tblslc6jG0XedVMNx") @@ -489,7 +629,7 @@ def comments(self, record_id: RecordId) -> List["pyairtable.models.Comment"]: Comment( id='comdVMNxslc6jG0Xe', text='Hello, @[usrVMNxslc6jG0Xed]!', - created_time='2023-06-07T17:46:24.435891', + created_time=datetime.datetime(...), last_updated_time=None, mentioned={ 'usrVMNxslc6jG0Xed': Mentioned( @@ -510,13 +650,10 @@ def comments(self, record_id: RecordId) -> List["pyairtable.models.Comment"]: Args: record_id: |arg_record_id| """ - url = self.record_url(record_id, "comments") + url = self.urls.record_comments(record_id) + ctx = {"record_url": self.urls.record(record_id)} return [ - pyairtable.models.Comment.from_api( - api=self.api, - url=self.record_url(record_id, "comments", comment["id"]), - obj=comment, - ) + pyairtable.models.Comment.from_api(comment, self.api, context=ctx) for page in self.api.iterate_requests("GET", url) for comment in page["comments"] ] @@ -527,7 +664,7 @@ def add_comment( text: str, ) -> "pyairtable.models.Comment": """ - Creates a comment on a record. + Create a comment on a record. See `Create comment `_ for details. Usage: @@ -541,15 +678,143 @@ def add_comment( record_id: |arg_record_id| text: The text of the comment. Use ``@[usrIdentifier]`` to mention users. 
""" - url = self.record_url(record_id, "comments") - response = self.api.request("POST", url, json={"text": text}) + url = self.urls.record_comments(record_id) + response = self.api.post(url, json={"text": text}) return pyairtable.models.Comment.from_api( - api=self.api, - url=self.record_url(record_id, "comments", response["id"]), - obj=response, + response, self.api, context={"record_url": self.urls.record(record_id)} + ) + + def schema(self, *, force: bool = False) -> TableSchema: + """ + Retrieve the schema of the current table. + + Usage: + >>> table.schema() + TableSchema( + id='tblslc6jG0XedVMNx', + name='My Table', + primary_field_id='fld6jG0XedVMNxFQW', + fields=[...], + views=[...] + ) + + Args: + force: |kwarg_force_metadata| + """ + if force or not self._schema: + self._schema = self.base.schema(force=force).table(self.name) + return self._schema + + def create_field( + self, + name: str, + field_type: str, + description: Optional[str] = None, + options: Optional[Dict[str, Any]] = None, + ) -> FieldSchema: + """ + Create a field on the table. + + Usage: + >>> table.create_field("Attachments", "multipleAttachment") + FieldSchema( + id='fldslc6jG0XedVMNx', + name='Attachments', + type='multipleAttachment', + description=None, + options=MultipleAttachmentsFieldOptions(is_reversed=False) + ) + + Args: + name: The unique name of the field. + field_type: One of the `Airtable field types `__. + description: A long form description of the table. + options: Only available for some field types. For more information, read about the + `Airtable field model `__. + """ + request: Dict[str, Any] = {"name": name, "type": field_type} + if description: + request["description"] = description + if options: + request["options"] = options + response = self.api.post(self.urls.fields, json=request) + # This hopscotch ensures that the FieldSchema object we return has an API and a URL, + # and that developers don't need to reload our schema to be able to access it. 
+ field_schema = parse_field_schema(response) + field_schema._set_api( + self.api, + context={ + "base": self.base, + "table_schema": self._schema or self, + }, ) + if self._schema: + self._schema.fields.append(field_schema) + return field_schema + + def upload_attachment( + self, + record_id: RecordId, + field: str, + filename: Union[str, Path], + content: Optional[Union[str, bytes]] = None, + content_type: Optional[str] = None, + ) -> UploadAttachmentResultDict: + """ + Upload an attachment to the Airtable API, either by supplying the path to the file + or by providing the content directly as a variable. + See `Upload attachment `__. -# These are at the bottom of the module to avoid circular imports -import pyairtable.api.api # noqa -import pyairtable.api.base # noqa + Usage: + >>> table.upload_attachment("recAdw9EjV90xbZ", "Attachments", "/tmp/example.jpg") + { + 'id': 'recAdw9EjV90xbZ', + 'createdTime': '2023-05-22T21:24:15.333134Z', + 'fields': { + 'Attachments': [ + { + 'id': 'attW8eG2x0ew1Af', + 'url': 'https://content.airtable.com/...', + 'filename': 'example.jpg' + } + ] + } + } + + Args: + record_id: |arg_record_id| + field: The ID or name of the ``multipleAttachments`` type field. + filename: The path to the file to upload. If ``content`` is provided, this + argument is still used to tell Airtable what name to give the file. + content: The content of the file as a string or bytes object. If no value + is provided, pyAirtable will attempt to read the contents of ``filename``. + content_type: The MIME type of the file. If not provided, the library will attempt to + guess the content type based on ``filename``. + + Returns: + A full list of attachments in the given field, including the new attachment. 
+ """ + if content is None: + with open(filename, "rb") as fp: + content = fp.read() + return self.upload_attachment( + record_id, field, filename, content, content_type + ) + + filename = os.path.basename(filename) + if content_type is None: + if not (content_type := mimetypes.guess_type(filename)[0]): + warnings.warn(f"Could not guess content-type for {filename!r}") + content_type = "application/octet-stream" + + # TODO: figure out how to handle the atypical subdomain in a more graceful fashion + url = self.urls.upload_attachment(record_id, field) + content = content.encode() if isinstance(content, str) else content + payload = { + "contentType": content_type, + "filename": filename, + "file": base64.encodebytes(content).decode("utf8"), # API needs Unicode + } + response = self.api.post(url, json=payload) + return assert_typed_dict(UploadAttachmentResultDict, response) diff --git a/pyairtable/api/types.py b/pyairtable/api/types.py index 84878820..d2efcd44 100644 --- a/pyairtable/api/types.py +++ b/pyairtable/api/types.py @@ -2,13 +2,13 @@ pyAirtable provides a number of type aliases and TypedDicts which are used as inputs and return values to various pyAirtable methods. """ + from functools import lru_cache from typing import Any, Dict, List, Optional, Type, TypeVar, Union, cast +import pydantic from typing_extensions import Required, TypeAlias, TypedDict -from pyairtable._compat import pydantic - T = TypeVar("T") #: An alias for ``str`` used internally for disambiguation. @@ -25,6 +25,29 @@ FieldName: TypeAlias = str +class NestedIdDict(TypedDict): + id: str + + +class AITextDict(TypedDict, total=False): + """ + A ``dict`` representing text generated by AI. + + >>> record = table.get('recW8eG2x0ew1Af') + >>> record['fields']['Generated Text'] + { + 'state': 'generated', + 'isStale': False, + 'value': '...' 
+ } + """ + + state: Required[str] + isStale: Required[bool] + value: Required[Optional[str]] + errorType: str + + class AttachmentDict(TypedDict, total=False): """ A ``dict`` representing an attachment stored in an Attachments field. @@ -52,7 +75,20 @@ class AttachmentDict(TypedDict, total=False): thumbnails: Dict[str, Dict[str, Union[str, int]]] -class CreateAttachmentDict(TypedDict, total=False): +class CreateAttachmentById(TypedDict): + """ + A ``dict`` representing a new attachment to be written to the Airtable API. + + >>> new_attachment = {"id": "attW8eG2x0ew1Af"} + >>> existing = record["fields"].setdefault("Attachments", []) + >>> existing.append(new_attachment) + >>> table.update(existing["id"], existing["fields"]) + """ + + id: str + + +class CreateAttachmentByUrl(TypedDict, total=False): """ A ``dict`` representing a new attachment to be written to the Airtable API. @@ -69,6 +105,9 @@ class CreateAttachmentDict(TypedDict, total=False): filename: str +CreateAttachmentDict: TypeAlias = Union[CreateAttachmentById, CreateAttachmentByUrl] + + class BarcodeDict(TypedDict, total=False): """ A ``dict`` representing the value stored in a Barcode field. @@ -160,6 +199,29 @@ class CollaboratorEmailDict(TypedDict): email: str +class AddUserCollaboratorDict(TypedDict): + """ + Used to add a user as a collaborator to a base, workspace, or interface. + """ + + user: NestedIdDict + permissionLevel: str + + +class AddGroupCollaboratorDict(TypedDict): + """ + Used to add a group as a collaborator to a base, workspace, or interface. + """ + + group: NestedIdDict + permissionLevel: str + + +AddCollaboratorDict: TypeAlias = Union[ + AddUserCollaboratorDict, AddGroupCollaboratorDict +] + + #: Represents the types of values that we might receive from the API. #: At present, is an alias for ``Any`` because we don't want to lose #: forward compatibility with any changes Airtable makes in the future. 
@@ -192,7 +254,7 @@ class CollaboratorEmailDict(TypedDict): WritableFields: TypeAlias = Dict[FieldName, WritableFieldValue] -class RecordDict(TypedDict): +class RecordDict(TypedDict, total=False): """ A ``dict`` representing a record returned from the Airtable API. See `List records `__. @@ -204,11 +266,20 @@ class RecordDict(TypedDict): 'createdTime': '2023-05-22T21:24:15.333134Z', 'fields': {'Name': 'Alice', 'Department': 'Engineering'} } + + >>> table.first(count_comments=True) + { + 'id': 'recAdw9EjV90xbW', + 'createdTime': '2023-05-22T21:24:15.333134Z', + 'fields': {'Name': 'Alice'}, + 'commentCount': 5 + } """ - id: RecordId - createdTime: Timestamp - fields: Fields + id: Required[RecordId] + createdTime: Required[Timestamp] + fields: Required[Fields] + commentCount: int class CreateRecordDict(TypedDict): @@ -256,6 +327,9 @@ class UpdateRecordDict(TypedDict): fields: WritableFields +AnyRecordDict: TypeAlias = Union[RecordDict, CreateRecordDict, UpdateRecordDict] + + class RecordDeletedDict(TypedDict): """ A ``dict`` representing the payload returned by the Airtable API to confirm a deletion. @@ -303,14 +377,40 @@ class UserAndScopesDict(TypedDict, total=False): scopes: List[str] +class UploadAttachmentResultDict(TypedDict): + """ + A ``dict`` representing the payload returned by + `Upload attachment `__. + + Usage: + >>> table.upload_attachment("recAdw9EjV90xbZ", "Attachments", "/tmp/example.jpg") + { + 'id': 'recAdw9EjV90xbZ', + 'createdTime': '2023-05-22T21:24:15.333134Z', + 'fields': { + 'Attachments': [ + { + 'id': 'attW8eG2x0ew1Af', + 'url': 'https://content.airtable.com/...', + 'filename': 'example.jpg' + } + ] + } + } + """ + + id: RecordId + createdTime: str + fields: Dict[str, List[AttachmentDict]] + + @lru_cache -def _create_model_from_typeddict(cls: Type[T]) -> Type[pydantic.BaseModel]: +def _create_model_from_typeddict(cls: Type[T]) -> pydantic.TypeAdapter[Any]: """ - Creates a pydantic model from a TypedDict to use as a validator. 
+ Create a pydantic model from a TypedDict to use as a validator. Memoizes the result so we don't have to call this more than once per class. """ - # Mypy can't tell that we are using pydantic v1. - return pydantic.create_model_from_typeddict(cls) # type: ignore[no-any-return, operator, unused-ignore] + return pydantic.TypeAdapter(cls) def assert_typed_dict(cls: Type[T], obj: Any) -> T: @@ -350,9 +450,21 @@ def assert_typed_dict(cls: Type[T], obj: Any) -> T: """ if not isinstance(obj, dict): raise TypeError(f"expected dict, got {type(obj)}") + + # special case for handling a Union + if getattr(cls, "__origin__", None) is Union: + typeddict_classes = list(getattr(cls, "__args__", [])) + while typeddict_cls := typeddict_classes.pop(): + try: + return cast(T, assert_typed_dict(typeddict_cls, obj)) + except pydantic.ValidationError: + # raise the last exception if we've tried everything + if not typeddict_classes: + raise + # mypy complains cls isn't Hashable, but it is; see https://github.com/python/mypy/issues/2412 - model = _create_model_from_typeddict(cls) # type: ignore - model(**obj) + model = _create_model_from_typeddict(cls) # type: ignore[arg-type] + model.validate_python(obj) return cast(T, obj) @@ -371,7 +483,7 @@ def assert_typed_dicts(cls: Type[T], objects: Any) -> List[T]: def is_airtable_error(obj: Any) -> bool: """ - Returns whether the given object represents an Airtable error. + Determine whether the given object represents an Airtable error. 
""" if isinstance(obj, dict): return set(obj) in ({"error"}, {"specialValue"}) diff --git a/pyairtable/api/workspace.py b/pyairtable/api/workspace.py new file mode 100644 index 00000000..7a63763c --- /dev/null +++ b/pyairtable/api/workspace.py @@ -0,0 +1,129 @@ +from functools import cached_property +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union + +from pyairtable.models.schema import WorkspaceCollaborators +from pyairtable.utils import Url, UrlBuilder, cache_unless_forced, enterprise_only + +if TYPE_CHECKING: + from pyairtable.api.api import Api + from pyairtable.api.base import Base + + +class Workspace: + """ + Represents an Airtable workspace, which contains a number of bases + and its own set of collaborators. + + >>> ws = api.workspace("wspmhESAta6clCCwF") + >>> ws.collaborators().name + 'my first workspace' + >>> ws.create_base("Base Name", tables=[...]) + + + Most workspace functionality is limited to users on Enterprise billing plans. + """ + + _collaborators: Optional[WorkspaceCollaborators] = None + + class _urls(UrlBuilder): + #: URL for retrieving the workspace's metadata and collaborators. + meta = Url("meta/workspaces/{id}") + + #: URL for moving a base to a new workspace. + move_base = meta / "moveBase" + + #: URL for POST requests that modify collaborations on the workspace. + collaborators = meta / "collaborators" + + urls = cached_property(_urls) + + def __init__(self, api: "Api", workspace_id: str): + self.api = api + self.id = workspace_id + + def create_base( + self, + name: str, + tables: Sequence[Dict[str, Any]], + ) -> "Base": + """ + Create a base in the given workspace. + + See https://airtable.com/developers/web/api/create-base + + Args: + name: The name to give to the new base. Does not need to be unique. + tables: A list of ``dict`` objects that conform to Airtable's + `Table model `__. 
+ """ + url = self.api.urls.bases + payload = {"name": name, "workspaceId": self.id, "tables": list(tables)} + response = self.api.post(url, json=payload) + return self.api.base(response["id"], validate=True, force=True) + + # Everything below here requires .info() and is therefore Enterprise-only + + @enterprise_only + @cache_unless_forced + def collaborators(self) -> WorkspaceCollaborators: + """ + Retrieve basic information, collaborators, and invite links + for the given workspace, caching the result. + + See https://airtable.com/developers/web/api/get-workspace-collaborators + """ + params = {"include": ["collaborators", "inviteLinks"]} + payload = self.api.get(self.urls.meta, params=params) + return WorkspaceCollaborators.from_api(payload, self.api, context=self) + + @enterprise_only + def bases(self) -> List["Base"]: + """ + Retrieve all bases within the workspace. + """ + return [self.api.base(base_id) for base_id in self.collaborators().base_ids] + + @property + @enterprise_only + def name(self) -> str: + """ + The name of the workspace. + """ + return self.collaborators().name + + @enterprise_only + def delete(self) -> None: + """ + Delete the workspace. + + See https://airtable.com/developers/web/api/delete-workspace + + Usage: + >>> ws = api.workspace("wspmhESAta6clCCwF") + >>> ws.delete() + """ + self.api.delete(self.urls.meta) + + @enterprise_only + def move_base( + self, + base: Union[str, "Base"], + target: Union[str, "Workspace"], + index: Optional[int] = None, + ) -> None: + """ + Move the given base to a new workspace. 
+ + See https://airtable.com/developers/web/api/move-base + + Usage: + >>> base = api.base("appCwFmhESAta6clC") + >>> ws = api.workspace("wspmhESAta6clCCwF") + >>> ws.move_base(base, "wspSomeOtherPlace", index=0) + """ + base_id = base if isinstance(base, str) else base.id + target_id = target if isinstance(target, str) else target.id + payload: Dict[str, Any] = {"baseId": base_id, "targetWorkspaceId": target_id} + if index is not None: + payload["targetIndex"] = index + self.api.post(self.urls.move_base, json=payload) diff --git a/pyairtable/cli.py b/pyairtable/cli.py new file mode 100644 index 00000000..dbc51639 --- /dev/null +++ b/pyairtable/cli.py @@ -0,0 +1,427 @@ +""" +pyAirtable exposes a command-line interface that allows you to interact with the API. +""" + +import functools +import json +import os +import re +import sys +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import Any, Callable, Iterator, Optional, Sequence, Tuple, Union + +from click import Context, HelpFormatter +from typing_extensions import ParamSpec, TypeVar + +from pyairtable.api.api import Api +from pyairtable.api.base import Base +from pyairtable.api.enterprise import Enterprise +from pyairtable.api.table import Table +from pyairtable.models._base import AirtableModel +from pyairtable.orm.generate import ModelFileBuilder +from pyairtable.utils import chunked, is_table_id + +try: + import click +except ImportError: # pragma: no cover + print( + "You are missing the 'click' library, which means you did not install\n" + "the optional dependencies required for the pyairtable command line.\n" + "Try again after running:\n\n" + " % pip install 'pyairtable[cli]'", + "\n", + file=sys.stderr, + ) + raise + + +T = TypeVar("T") +F = TypeVar("F", bound=Callable[..., Any]) +P = ParamSpec("P") + + +@dataclass +class CliContext: + access_token: str = "" + base_id: str = "" + table_id_or_name: str = "" + enterprise_id: str = "" + click_context: 
Optional["click.Context"] = None + + @functools.cached_property + def api(self) -> Api: + return Api(self.access_token) + + @functools.cached_property + def base(self) -> Base: + return self.api.base(self.base_id) + + @functools.cached_property + def table(self) -> Table: + return self.base.table(self.table_id_or_name) + + @functools.cached_property + def enterprise(self) -> Enterprise: + return self.api.enterprise(self.enterprise_id) + + @property + def click(self) -> click.Context: + assert self.click_context is not None + return self.click_context + + def default_subcommand(self, cmd: F) -> None: + if not self.click.invoked_subcommand: + self.click.invoke(cmd) + + +def needs_context(func: Callable[P, T]) -> Callable[P, T]: + @functools.wraps(func) + @click.pass_context + def _wrapped(click_ctx: click.Context, /, *args: P.args, **kwargs: P.kwargs) -> T: + obj = click_ctx.ensure_object(CliContext) + obj.click_context = click_ctx + return click_ctx.invoke(func, obj, *args, **kwargs) + + return _wrapped + + +class ShortcutGroup(click.Group): + """ + A command group that will accept partial command names and complete them. + """ + + def get_command(self, ctx: click.Context, cmd_name: str) -> Optional[click.Command]: + if exact := super().get_command(ctx, cmd_name): + return exact + # If exactly one subcommand starts with the given name, use that. 
+ existing = [cmd for cmd in self.list_commands(ctx) if cmd.startswith(cmd_name)] + if len(existing) == 1: + return super().get_command(ctx, existing[0]) + return None + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + from gettext import gettext as _ + + rows = [ + (name, (command.short_help or command.help or "").strip()) + for name, command in CLI_COMMANDS.items() + ] + col_max = max(len(row[0]) for row in rows) + + with formatter.section(_("Commands")): + formatter.write_dl(rows, col_max=col_max) + + +# fmt: off +@click.group(cls=ShortcutGroup) +@click.option("-k", "--key", help="Your API key.") +@click.option("-kf", "--key-file", type=click.Path(exists=True), help="File containing your API key.") +@click.option("-ke", "--key-env", metavar="VAR", help="Env var containing your API key.") +@click.option("-v", "--verbose", is_flag=True, help="Print verbose output.") +@needs_context +# fmt: on +def cli( + ctx: CliContext, + key: str = "", + key_file: str = "", + key_env: str = "", + verbose: bool = False, +) -> None: + if not any([key, key_file, key_env]): + try: + key_file = os.environ["AIRTABLE_API_KEY_FILE"] + except KeyError: + try: + key = os.environ["AIRTABLE_API_KEY"] + except KeyError: + raise click.UsageError("--key, --key-file, or --key-env required") + + if len([arg for arg in (key, key_file, key_env) if arg]) > 1: + raise click.UsageError("only one of --key, --key-file, --key-env allowed") + + if key_file: + with open(key_file) as inputf: + key = inputf.read().strip() + + if key_env: + key = os.environ[key_env] + + ctx.access_token = key + + +@cli.command() +@needs_context +def whoami(ctx: CliContext) -> None: + """ + Print the current user's information. + """ + _dump(ctx.api.whoami()) + + +@cli.command() +@needs_context +def bases(ctx: CliContext) -> None: + """ + List all available bases. 
+ """ + _dump(ctx.api._base_info().bases) + + +@cli.group(invoke_without_command=True, cls=ShortcutGroup) +@click.argument("base_id") +@needs_context +def base(ctx: CliContext, base_id: str) -> None: + """ + Print information about a base. + """ + ctx.base_id = base_id + ctx.default_subcommand(base_schema) + + +@base.command("schema") +@needs_context +def base_schema(ctx: CliContext) -> None: + """ + Print the base schema. + """ + _dump(ctx.base.schema()) + + +@base.group("table", invoke_without_command=True, cls=ShortcutGroup) +@needs_context +@click.argument("id_or_name") +def base_table(ctx: CliContext, id_or_name: str) -> None: + """ + Print information about a table. + """ + ctx.table_id_or_name = id_or_name + ctx.default_subcommand(base_table_schema) + + +@base_table.command("records") +@needs_context +# fmt: off +@click.option("-f", "--formula", help="Filter records with a formula.") +@click.option("-v", "--view", help="Filter records by a view.") +@click.option("-n", "--limit", "max_records", type=int, help="Limit the number of records returned.") +@click.option("-S", "--sort", help="Sort records by field(s).", multiple=True) +@click.option("-F", "--field", "fields", help="Limit output to certain field(s).", multiple=True) +# fmt: on +def base_table_records( + ctx: CliContext, + formula: Optional[str], + view: Optional[str], + max_records: Optional[int], + fields: Sequence[str], + sort: Sequence[str], +) -> None: + """ + Retrieve records from the table. + """ + fields = list(fields) + sort = list(sort) + _dump( + ctx.table.all( + formula=formula, + view=view, + max_records=max_records, + fields=fields, + sort=sort, + ) + ) + + +@base_table.command("schema") +@needs_context +def base_table_schema(ctx: CliContext) -> None: + """ + Print the table's schema as JSON. + """ + _dump(ctx.table.schema()) + + +@base.command("collaborators") +@needs_context +def base_collaborators(ctx: CliContext) -> None: + """ + Print base collaborators. 
+ """ + _dump(ctx.base.collaborators()) + + +@base.command("shares") +@needs_context +def base_shares(ctx: CliContext) -> None: + """ + Print base shares. + """ + _dump(ctx.base.shares()) + + +@base.command("orm") +@needs_context +@click.option( + "-t", + "--table", + help="Only generate specific table(s).", + metavar="NAME_OR_ID", + multiple=True, +) +def base_orm(ctx: CliContext, table: Sequence[str]) -> None: + """ + Generate a Python ORM module. + """ + table_ids = [t for t in table if is_table_id(t)] + table_names = [t for t in table if not is_table_id(t)] + generator = ModelFileBuilder(ctx.base, table_ids=table_ids, table_names=table_names) + now = datetime.now(timezone.utc).isoformat() + print("# This file was generated by pyAirtable at", now) + print("# Any modifications to this file will be lost if it is rebuilt.") + print() + print(str(generator)) + + +@cli.group(invoke_without_command=True, cls=ShortcutGroup) +@click.argument("enterprise_id") +@needs_context +def enterprise(ctx: CliContext, enterprise_id: str) -> None: + """ + Print information about a user. + """ + ctx.enterprise_id = enterprise_id + ctx.default_subcommand(enterprise_info) + + +@enterprise.command("info") +@needs_context +def enterprise_info(ctx: CliContext) -> None: + """ + Print information about an enterprise. + """ + _dump(ctx.enterprise.info()) + + +@enterprise.command("user") +@needs_context +@click.argument("id_or_email") +def enterprise_user(ctx: CliContext, id_or_email: str) -> None: + """ + Print one user's information. 
+ """ + _dump(ctx.enterprise.user(id_or_email)) + + +@enterprise.command("users") +@needs_context +@click.argument("ids_or_emails", metavar="ID_OR_EMAIL...", nargs=-1) +@click.option("-c", "--collaborations", is_flag=True, help="Include collaborations.") +@click.option("-a", "--all", "all_users", is_flag=True, help="Retrieve all users.") +def enterprise_users( + ctx: CliContext, + ids_or_emails: Sequence[str], + collaborations: bool = False, + all_users: bool = False, +) -> None: + """ + Print many users, keyed by user ID. + """ + if all_users and ids_or_emails: + raise click.UsageError("Cannot combine --all with specific user IDs/emails.") + if all_users: + ids_or_emails = list(ctx.enterprise.info().user_ids) + if not ids_or_emails: + raise click.UsageError("No user IDs or emails provided.") + _dump( + { + user.id: user._raw + for chunk in chunked(ids_or_emails, 100) + for user in ctx.enterprise.users(chunk, collaborations=collaborations) + } + ) + + +@enterprise.command("group") +@needs_context +@click.argument("group_id") +def enterprise_group(ctx: CliContext, group_id: str) -> None: + """ + Print a user group's information. + """ + _dump(ctx.enterprise.group(group_id)) + + +@enterprise.command("groups") +@needs_context +@click.argument("group_ids", metavar="GROUP_ID...", nargs=-1) +@click.option("-a", "--all", "all_groups", is_flag=True, help="Retrieve all groups.") +@click.option("-c", "--collaborations", is_flag=True, help="Include collaborations.") +def enterprise_groups( + ctx: CliContext, + group_ids: Sequence[str], + all_groups: bool = False, + collaborations: bool = False, +) -> None: + """ + Print many groups, keyed by group ID. 
+ """ + if all_groups and group_ids: + raise click.UsageError("Cannot combine --all with specific group IDs.") + if all_groups: + group_ids = list(ctx.enterprise.info().group_ids) + if not group_ids: + raise click.UsageError("No group IDs provided.") + _dump( + { + group.id: group._raw + for group_id in group_ids + if (group := ctx.enterprise.group(group_id, collaborations=collaborations)) + } + ) + + +class JSONEncoder(json.JSONEncoder): + def default(self, o: Any) -> Any: + if isinstance(o, AirtableModel): + return o._raw + return super().default(o) # pragma: no cover + + +def _dump(obj: Any) -> None: + print(json.dumps(obj, cls=JSONEncoder)) + + +def _gather_commands( + command: Union[click.Command, click.Group] = cli, + prefix: str = "", +) -> Iterator[Tuple[str, Union[click.Command, click.Group]]]: + """ + Enumerate through all commands and groups, yielding a 2-tuple of + a human-readable command line and the associated function. + """ + # placeholders for arguments so we make a valid testable command + if command.name != cli.name: + prefix = f"{prefix} {command.name}".strip() + + for param in command.params: + if not isinstance(param, click.Argument): + continue + if param.required or (param.metavar and param.metavar.endswith("...")): + metavar = (param.metavar or param.name or "ARG").upper() + metavar = re.sub(r"\b[A-Z]+_ID", "ID", metavar) + prefix = f"{prefix} {metavar}".strip() + + if not isinstance(command, click.Group): + yield (prefix, command) + return + + for subcommand in command.commands.values(): + yield from _gather_commands(subcommand, prefix=prefix) + + +#: Mapping of command names to their functions. 
+CLI_COMMANDS = dict(_gather_commands(cli)) + + +if __name__ == "__main__": + cli() # pragma: no cover diff --git a/pyairtable/exceptions.py b/pyairtable/exceptions.py new file mode 100644 index 00000000..69afe1d4 --- /dev/null +++ b/pyairtable/exceptions.py @@ -0,0 +1,40 @@ +class PyAirtableError(Exception): + """ + Base class for all exceptions raised by PyAirtable. + """ + + +class CircularFormulaError(PyAirtableError, RecursionError): + """ + A circular dependency was encountered when flattening nested conditions. + """ + + +class InvalidParameterError(PyAirtableError, ValueError): + """ + Raised when invalid parameters are passed to ``all()``, ``first()``, etc. + """ + + +class MissingValueError(PyAirtableError, ValueError): + """ + A required field received an empty value, either from Airtable or other code. + """ + + +class MultipleValuesError(PyAirtableError, ValueError): + """ + SingleLinkField received more than one value from either Airtable or calling code. + """ + + +class ReadonlyFieldError(PyAirtableError, ValueError): + """ + Attempted to set a value on a readonly field. + """ + + +class UnsavedRecordError(PyAirtableError, ValueError): + """ + Attempted to perform an unsupported operation on an unsaved record. + """ diff --git a/pyairtable/formulas.py b/pyairtable/formulas.py index 3e9e1f50..e69d8968 100644 --- a/pyairtable/formulas.py +++ b/pyairtable/formulas.py @@ -1,220 +1,1158 @@ +""" +This module exports building blocks for constructing Airtable formulas, +including function call proxies for all formula functions as of Dec '23. + +See :doc:`formulas` for more information. 
+""" + +import datetime import re -from datetime import date, datetime -from typing import Any +import warnings +from decimal import Decimal +from fractions import Fraction +from typing import Any, ClassVar, Iterable, List, Optional, Set, Union + +from typing_extensions import Self as SelfType +from typing_extensions import TypeAlias from pyairtable.api.types import Fields +from pyairtable.exceptions import CircularFormulaError +from pyairtable.utils import date_to_iso_str, datetime_to_iso_str -from .utils import date_to_iso_str, datetime_to_iso_str +class Formula: + """ + Represents an Airtable formula that can be combined with other formulas + or converted to a string. On its own, this class simply wraps a ``str`` + so that it will be not be modified or escaped as if it were a value. -def match(dict_values: Fields, *, match_any: bool = False) -> str: + >>> Formula("{Column} = 1") + Formula('{Column} = 1') + >>> str(_) + '{Column} = 1' """ - Creates one or more ``EQUAL()`` expressions for each provided dict value. - If more than one assetions is included, the expressions are - groupped together into using ``AND()`` (all values must match). - If ``match_any=True``, expressions are grouped with ``OR()``, record is return - if any of the values match. + def __init__(self, value: str) -> None: + self.value = value - This function also handles escaping field names and casting python values - to the appropriate airtable types using :func:`to_airtable_value` on all - provided values to help generate the expected formula syntax. + def __str__(self) -> str: + return self.value - If you need more advanced matching you can build similar expressions using lower - level forumula primitives. 
+ def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.value!r})" + def __and__(self, other: Any) -> "Formula": + return AND(self, to_formula(other)) - Args: - dict_values: dictionary containing column names and values + def __or__(self, other: Any) -> "Formula": + return OR(self, to_formula(other)) - Keyword Args: - match_any (``bool``, default: ``False``): - If ``True``, matches if **any** of the provided values match. - Otherwise, all values must match. + def __xor__(self, other: Any) -> "Formula": + return XOR(self, to_formula(other)) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, type(self)): + return False + return other.value == self.value + + def __invert__(self) -> "Formula": + return NOT(self) + + def flatten(self) -> "Formula": + """ + Return a new formula with nested boolean statements flattened. + """ + return self + + def eq(self, value: Any) -> "Comparison": + """ + Build an :class:`~pyairtable.formulas.EQ` comparison using this formula. + """ + return EQ(self, value) + + def ne(self, value: Any) -> "Comparison": + """ + Build an :class:`~pyairtable.formulas.NE` comparison using this formula. + """ + return NE(self, value) + + def gt(self, value: Any) -> "Comparison": + """ + Build a :class:`~pyairtable.formulas.GT` comparison using this formula. + """ + return GT(self, value) + + def lt(self, value: Any) -> "Comparison": + """ + Build an :class:`~pyairtable.formulas.LT` comparison using this formula. + """ + return LT(self, value) + + def gte(self, value: Any) -> "Comparison": + """ + Build a :class:`~pyairtable.formulas.GTE` comparison using this formula. + """ + return GTE(self, value) + + def lte(self, value: Any) -> "Comparison": + """ + Build an :class:`~pyairtable.formulas.LTE` comparison using this formula. + """ + return LTE(self, value) + + +class Field(Formula): + """ + Represents a field name. 
+ """ + + def __str__(self) -> str: + return field_name(self.value) + + +class Comparison(Formula): + """ + Represents a logical condition that compares two expressions. + """ + + operator: ClassVar[str] = "" + + def __init__(self, lval: Any, rval: Any): + if not self.operator: + raise NotImplementedError( + f"{self.__class__.__name__}.operator is not defined" + ) + self.lval = lval + self.rval = rval + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Comparison): + return False + return (self.lval, self.operator, self.rval) == ( + other.lval, + other.operator, + other.rval, + ) + + def __str__(self) -> str: + lval, rval = (to_formula_str(v) for v in (self.lval, self.rval)) + lval = f"({lval})" if isinstance(self.lval, Comparison) else lval + rval = f"({rval})" if isinstance(self.rval, Comparison) else rval + return f"{lval}{self.operator}{rval}" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.lval!r}, {self.rval!r})" + + +class EQ(Comparison): + """ + Produces an ``lval = rval`` formula. + """ + + operator = "=" + + +class NE(Comparison): + """ + Produces an ``lval != rval`` formula. + """ + + operator = "!=" + + +class GT(Comparison): + """ + Produces an ``lval > rval`` formula. + """ + + operator = ">" + + +class GTE(Comparison): + """ + Produces an ``lval >= rval`` formula. + """ + + operator = ">=" + + +class LT(Comparison): + """ + Produces an ``lval < rval`` formula. + """ + + operator = "<" + + +class LTE(Comparison): + """ + Produces an ``lval <= rval`` formula. + """ + + operator = "<=" + + +COMPARISONS_BY_OPERATOR = {cls.operator: cls for cls in (EQ, NE, GT, GTE, LT, LTE)} + + +class Compound(Formula): + """ + Represents a boolean logical operator (AND, OR, etc.) wrapping around + one or more component formulas. 
+ """ + + operator: str + components: List[Formula] + + def __init__( + self, + operator: str, + components: Iterable[Formula], + ) -> None: + if not isinstance(components, list): + components = list(components) + if len(components) == 0: + raise ValueError("Compound() requires at least one component") + + self.operator = operator + self.components = components + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Compound): + return False + return (self.operator, self.components) == (other.operator, other.components) + + def __str__(self) -> str: + joined_components = ", ".join(str(c) for c in self.components) + return f"{self.operator}({joined_components})" + + def __repr__(self) -> str: + return f"{self.operator}({repr(self.components)[1:-1]})" + + def flatten(self, /, memo: Optional[Set[int]] = None) -> "Compound": + """ + Reduces the depth of nested AND, OR, and NOT statements. + """ + memo = memo if memo else set() + memo.add(id(self)) + flattened: List[Formula] = [] + for item in self.components: + if id(item) in memo: + raise CircularFormulaError(item) + if isinstance(item, Compound) and item.operator == self.operator: + flattened.extend(item.flatten(memo=memo).components) + else: + flattened.append(item.flatten()) + + return Compound(self.operator, flattened) + + @classmethod + def build(cls, operator: str, *components: Any, **fields: Any) -> SelfType: + items = list(components) + if len(items) == 1 and hasattr(first := items[0], "__iter__"): + items = [first] if isinstance(first, str) else list(first) + if fields: + items.extend(EQ(Field(k), v) for (k, v) in fields.items()) + return cls(operator, items) + + +def AND(*components: Union[Formula, Iterable[Formula]], **fields: Any) -> Compound: + """ + Join one or more logical conditions into an AND compound condition. + Keyword arguments will be treated as field names. 
+ + >>> AND(EQ("foo", 1), EQ(Field("bar"), 2), baz=3) + AND(EQ('foo', 1), EQ(Field('bar'), 2), EQ(Field('baz'), 3)) + """ + return Compound.build("AND", *components, **fields) + + +def OR(*components: Union[Formula, Iterable[Formula]], **fields: Any) -> Compound: + """ + Join one or more logical conditions into an OR compound condition. + Keyword arguments will be treated as field names. + + >>> OR(EQ("foo", 1), EQ(Field("bar"), 2), baz=3) + OR(EQ('foo', 1), EQ(Field('bar'), 2), EQ(Field('baz'), 3)) + """ + return Compound.build("OR", *components, **fields) + + +def NOT(component: Optional[Formula] = None, /, **fields: Any) -> Compound: + """ + Wrap one logical condition in a negation compound. + Keyword arguments will be treated as field names. + + Can be called with either a formula or with a single + kewyord argument, but not both. + + >>> NOT(EQ("foo", 1)) + NOT(EQ('foo', 1)) + + >>> NOT(foo=1) + NOT(EQ(Field('foo'), 1)) + + If not called with exactly one condition, will throw an exception: + + >>> NOT(EQ("foo", 1), EQ("bar", 2)) + Traceback (most recent call last): + TypeError: NOT() takes from 0 to 1 positional arguments but 2 were given + + >>> NOT(EQ("foo", 1), bar=2) + Traceback (most recent call last): + ValueError: NOT() requires exactly one condition; got 2 + + >>> NOT(foo=1, bar=2) + Traceback (most recent call last): + ValueError: NOT() requires exactly one condition; got 2 + + >>> NOT() + Traceback (most recent call last): + ValueError: NOT() requires exactly one condition; got 0 + """ + items: List[Formula] = [EQ(Field(k), v) for (k, v) in fields.items()] + if component: + items.append(component) + if (count := len(items)) != 1: + raise ValueError(f"NOT() requires exactly one condition; got {count}") + return Compound.build("NOT", items) + + +def match(field_values: Fields, *, match_any: bool = False) -> Formula: + r""" + Create one or more equality expressions for each provided value, + treating keys as field names and values as values (not formula 
expressions). + + If more than one assertion is included, the expressions are + grouped together into using ``AND()`` (all values must match). + If ``match_any=True``, expressions are grouped with ``OR()``. - Usage: >>> match({"First Name": "John", "Age": 21}) - "AND({First Name}='John',{Age}=21)" + AND(EQ(Field('First Name'), 'John'), + EQ(Field('Age'), 21)) + >>> match({"First Name": "John", "Age": 21}, match_any=True) - "OR({First Name}='John',{Age}=21)" - >>> match({"First Name": "John"}) - "{First Name}='John'" - >>> match({"Registered": True}) - "{Registered}=1" - >>> match({"Owner's Name": "Mike"}) - "{Owner\\'s Name}='Mike'" + OR(EQ(Field('First Name'), 'John'), + EQ(Field('Age'), 21)) + + To use comparisons other than equality, use a 2-tuple of ``(operator, value)`` + as the value for a particular field. For example: + + >>> match({"First Name": "John", "Age": (">=", 21)}) + AND(EQ(Field('First Name'), 'John'), + GTE(Field('Age'), 21)) + + If you need more advanced matching you can build formula expressions using lower + level primitives. + Args: + field_values: mapping of column names to values + (or to 2-tuples of the format ``(operator, value)``). + match_any: + If ``True``, matches if *any* of the provided values match. + Otherwise, all values must match. 
""" - expressions = [] - for key, value in dict_values.items(): - expression = EQUAL(FIELD(key), to_airtable_value(value)) - expressions.append(expression) + expressions: List[Formula] = [] + + for key, val in field_values.items(): + if isinstance(val, tuple) and len(val) == 2: + cmp, val = COMPARISONS_BY_OPERATOR[val[0]], val[1] + else: + cmp = EQ + expressions.append(cmp(Field(key), val)) if len(expressions) == 0: - return "" - elif len(expressions) == 1: + raise ValueError( + "match() requires at least one field-value pair or keyword argument" + ) + if len(expressions) == 1: return expressions[0] - else: - if not match_any: - return AND(*expressions) - else: - return OR(*expressions) + if match_any: + return OR(*expressions) + return AND(*expressions) -def escape_quotes(value: str) -> str: +def to_formula(value: Any) -> Formula: + """ + Converts the given value into a Formula object. + + When given a Formula object, it returns the object as-is: + + >>> to_formula(EQ(F.Formula("a"), "b")) + EQ(Formula('a'), 'b') + + When given a scalar value, it simply wraps that value's string representation + in a Formula object: + + >>> to_formula(1) + Formula('1') + >>> to_formula('foo') + Formula("'foo'") + + Boolean and date values receive custom function calls: + + >>> to_formula(True) + TRUE() + >>> to_formula(False) + FALSE() + >>> to_formula(datetime.date(2023, 12, 1)) + DATETIME_PARSE('2023-12-01') + >>> to_formula(datetime.datetime(2023, 12, 1, 12, 34, 56)) + DATETIME_PARSE('2023-12-01T12:34:56.000Z') + """ + if isinstance(value, Formula): + return value + if isinstance(value, bool): + return TRUE() if value else FALSE() + if isinstance(value, (int, float, Decimal, Fraction)): + return Formula(str(value)) + if isinstance(value, str): + return Formula(quoted(value)) + if isinstance(value, datetime.datetime): + return DATETIME_PARSE(datetime_to_iso_str(value)) + if isinstance(value, datetime.date): + return DATETIME_PARSE(date_to_iso_str(value)) + + # Runtime import to 
avoid circular dependency + import pyairtable.orm + + if isinstance(value, pyairtable.orm.fields.Field): + return Field(value.field_name) + + raise TypeError(value, type(value)) + + +def to_formula_str(value: Any) -> str: + """ + Converts the given value into a string representation that can be used + in an Airtable formula expression. + + >>> to_formula_str(EQ(F.Formula("a"), "b")) + "a='b'" + >>> to_formula_str(True) + 'TRUE()' + >>> to_formula_str(False) + 'FALSE()' + >>> to_formula_str(3) + '3' + >>> to_formula_str(3.5) + '3.5' + >>> to_formula_str(Decimal("3.14159265")) + '3.14159265' + >>> to_formula_str(Fraction("4/19")) + '4/19' + >>> to_formula_str("asdf") + "'asdf'" + >>> to_formula_str("Jane's") + "'Jane\\'s'" + >>> to_formula_str(datetime.date(2023, 12, 1)) + "DATETIME_PARSE('2023-12-01')" + >>> to_formula_str(datetime.datetime(2023, 12, 1, 12, 34, 56)) + "DATETIME_PARSE('2023-12-01T12:34:56.000Z')" + """ + return str(to_formula(value)) + + +def quoted(value: str) -> str: + r""" + Wrap string in quotes. This is needed when referencing a string inside a formula. + Quotes are escaped. + + >>> quoted("John") + "'John'" + >>> quoted("Guest's Name") + "'Guest\\'s Name'" + """ + value = value.replace("\\", r"\\").replace("'", r"\'") + return "'{}'".format(value) + + +def escape_quotes(value: str) -> str: # pragma: no cover r""" - Ensures any quotes are escaped. Already escaped quotes are ignored. + Ensure any quotes are escaped. Already escaped quotes are ignored. + + This function has been deprecated. + Use :func:`~pyairtable.formulas.quoted` instead. 
Args: value: text to be escaped Usage: - >>> escape_quotes("Player's Name") - Player\'s Name - >>> escape_quotes("Player\'s Name") - Player\'s Name + >>> escape_quotes(r"Player's Name") + "Player\\'s Name" + >>> escape_quotes(r"Player\'s Name") + "Player\\'s Name" """ + warnings.warn( + "escape_quotes is deprecated; use quoted() instead.", + category=DeprecationWarning, + stacklevel=2, + ) escaped_value = re.sub("(? Any: +def field_name(name: str) -> str: + r""" + Create a reference to a field. Quotes are escaped. + + Args: + name: field name + + Usage: + >>> field_name("First Name") + '{First Name}' + >>> field_name("Guest's Name") + "{Guest's Name}" """ - Cast value to appropriate airtable types and format. - For example, to check ``bool`` values in formulas, you actually to compare - to 0 and 1. + # This will not actually work with field names that contain more + # than one closing curly brace; that's a limitation of Airtable. + # Our library will escape all closing braces, but the API will fail. + return "{%s}" % name.replace("}", r"\}") - .. list-table:: - :widths: 25 75 - :header-rows: 1 - * - Input - - Output - * - ``bool`` - - ``int`` - * - ``str`` - - ``str``; text is wrapped in `'single quotes'`; existing quotes are escaped. - * - all others - - unchanged +FunctionArg: TypeAlias = Union[ + str, + int, + float, + bool, + Decimal, + Fraction, + Formula, + datetime.date, + datetime.datetime, +] - Args: - value: value to be cast. +class FunctionCall(Formula): """ - if isinstance(value, bool): - return int(value) - elif isinstance(value, (int, float)): - return value - elif isinstance(value, str): - return STR_VALUE(value) - elif isinstance(value, datetime): - return datetime_to_iso_str(value) - elif isinstance(value, date): - return date_to_iso_str(value) + Represents a function call in an Airtable formula, and converts + all arguments to that function into Airtable formula expressions. 
+ + >>> FunctionCall("WEEKDAY", datetime.date(2024, 1, 1)) + WEEKDAY(datetime.date(2024, 1, 1)) + >>> str(_) + "WEEKDAY(DATETIME_PARSE('2024-01-01'))" + + pyAirtable exports shortcuts like :meth:`~pyairtable.formulas.WEEKDAY` + for all formula functions known at time of publishing. + """ + + def __init__(self, name: str, *args: FunctionArg): + self.name = name + self.args = args + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, FunctionCall): + return False + return (self.name, self.args) == (other.name, other.args) + + def __str__(self) -> str: + joined_args = ", ".join(to_formula_str(v) for v in self.args) + return f"{self.name}({joined_args})" + + def __repr__(self) -> str: + joined_args_repr = ", ".join(repr(v) for v in self.args) + return f"{self.name}({joined_args_repr})" + + +# fmt: off +r"""[[[cog]]] + +import re +from pathlib import Path + +definitions = [ + line.strip() + for line in Path(cog.inFile).with_suffix(".txt").read_text().splitlines() + if line.strip() + and not line.startswith("#") +] + +cog.outl("\n") + +for definition in definitions: + comment = "" + if "#" in definition: + definition, comment = (x.strip() for x in definition.split("#", 1)) + + name, argspec = definition.rstrip(")").split("(", 1) + if name in ("AND", "OR", "NOT"): + continue + + args = [ + re.sub( + "([a-z])([A-Z])", + lambda m: m[1] + "_" + m[2].lower(), + name.strip() + ) + for name in argspec.split(",") + ] + + required = [arg for arg in args if arg and not arg.startswith("[")] + optional = [arg.strip("[]") for arg in args if arg.startswith("[") and arg.endswith("]")] + signature = [f"{arg}: FunctionArg" for arg in required] + params = [*required] + splat = optional.pop().rstrip(".") if optional and optional[-1].endswith("...") else None + + if optional: + signature += [f"{arg}: Optional[FunctionArg] = None" for arg in optional] + params += ["*(v for v in [" + ", ".join(optional) + "] if v is not None)"] + + if required or optional: + signature += ["/"] 
+ + if splat: + signature += [f"*{splat}: FunctionArg"] + params += [f"*{splat}"] + + joined_signature = ", ".join(signature) + joined_params = (", " + ", ".join(params)) if params else "" + + cog.outl(f"def {name}({joined_signature}) -> FunctionCall:") + cog.outl(f" \"\"\"") + if comment: + cog.outl(f" {comment}") else: - return value + cog.outl(f" Produce a formula that calls ``{name}()``") + cog.outl(f" \"\"\"") + cog.outl(f" return FunctionCall({name!r}{joined_params})") + cog.outl("\n") + +[[[out]]]""" -def EQUAL(left: Any, right: Any) -> str: +def ABS(value: FunctionArg, /) -> FunctionCall: """ - Creates an equality assertion + Returns the absolute value. + """ + return FunctionCall('ABS', value) + - >>> EQUAL(2,2) - '2=2' +def AVERAGE(number: FunctionArg, /, *numbers: FunctionArg) -> FunctionCall: """ - return "{}={}".format(left, right) + Returns the average of the numbers. + """ + return FunctionCall('AVERAGE', number, *numbers) -def FIELD(name: str) -> str: +def BLANK() -> FunctionCall: + """ + Returns a blank value. """ - Creates a reference to a field. Quotes are escaped. + return FunctionCall('BLANK') - Args: - name: field name - Usage: - >>> FIELD("First Name") - '{First Name}' - >>> FIELD("Guest's Name") - "{Guest\\'s Name}" +def CEILING(value: FunctionArg, significance: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Returns the nearest integer multiple of significance that is greater than or equal to the value. If no significance is provided, a significance of 1 is assumed. """ - return "{%s}" % escape_quotes(name) + return FunctionCall('CEILING', value, *(v for v in [significance] if v is not None)) -def STR_VALUE(value: str) -> str: +def CONCATENATE(text: FunctionArg, /, *texts: FunctionArg) -> FunctionCall: """ - Wraps string in quotes. This is needed when referencing a string inside a formula. - Quotes are escaped. + Joins together the text arguments into a single text value. 
+ """ + return FunctionCall('CONCATENATE', text, *texts) - >>> STR_VALUE("John") - "'John'" - >>> STR_VALUE("Guest's Name") - "'Guest\\'s Name'" - >>> EQUAL(STR_VALUE("John"), FIELD("First Name")) - "'John'={First Name}" + +def COUNT(number: FunctionArg, /, *numbers: FunctionArg) -> FunctionCall: """ - return "'{}'".format(escape_quotes(str(value))) + Count the number of numeric items. + """ + return FunctionCall('COUNT', number, *numbers) -def IF(logical: str, value1: str, value2: str) -> str: +def COUNTA(value: FunctionArg, /, *values: FunctionArg) -> FunctionCall: + """ + Count the number of non-empty values. This function counts both numeric and text values. """ - Creates an IF statement + return FunctionCall('COUNTA', value, *values) - >>> IF(1=1, 0, 1) - 'IF(1=1, 0, 1)' + +def COUNTALL(value: FunctionArg, /, *values: FunctionArg) -> FunctionCall: + """ + Count the number of all elements including text and blanks. """ - return "IF({}, {}, {})".format(logical, value1, value2) + return FunctionCall('COUNTALL', value, *values) -def FIND(what: str, where: str, start_position: int = 0) -> str: +def CREATED_TIME() -> FunctionCall: """ - Creates a FIND statement + Returns the date and time a given record was created. + """ + return FunctionCall('CREATED_TIME') - >>> FIND(STR(2021), FIELD('DatetimeCol')) - "FIND('2021', {DatetimeCol})" - Args: - what: String to search for - where: Where to search. Could be a string, or a field reference. - start_position: Index of where to start search. Default is 0. +def DATEADD(date: FunctionArg, number: FunctionArg, units: FunctionArg, /) -> FunctionCall: + """ + Adds specified "count" units to a datetime. (See `list of shared unit specifiers `__. For this function we recommend using the full unit specifier for your desired unit.) 
+ """ + return FunctionCall('DATEADD', date, number, units) + +def DATESTR(date: FunctionArg, /) -> FunctionCall: """ - if start_position: - return "FIND({}, {}, {})".format(what, where, start_position) - else: - return "FIND({}, {})".format(what, where) + Formats a datetime into a string (YYYY-MM-DD). + """ + return FunctionCall('DATESTR', date) + + +def DATETIME_DIFF(date1: FunctionArg, date2: FunctionArg, units: FunctionArg, /) -> FunctionCall: + """ + Returns the difference between datetimes in specified units. The difference between datetimes is determined by subtracting [date2] from [date1]. This means that if [date2] is later than [date1], the resulting value will be negative. + """ + return FunctionCall('DATETIME_DIFF', date1, date2, units) + + +def DATETIME_FORMAT(date: FunctionArg, output_format: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Formats a datetime into a specified string. See an `explanation of how to use this function with date fields `__ or a list of `supported format specifiers `__. + """ + return FunctionCall('DATETIME_FORMAT', date, *(v for v in [output_format] if v is not None)) + + +def DATETIME_PARSE(date: FunctionArg, input_format: Optional[FunctionArg] = None, locale: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Interprets a text string as a structured date, with optional input format and locale parameters. The output format will always be formatted 'M/D/YYYY h:mm a'. + """ + return FunctionCall('DATETIME_PARSE', date, *(v for v in [input_format, locale] if v is not None)) + + +def DAY(date: FunctionArg, /) -> FunctionCall: + """ + Returns the day of the month of a datetime in the form of a number between 1-31. + """ + return FunctionCall('DAY', date) + + +def ENCODE_URL_COMPONENT(component_string: FunctionArg, /) -> FunctionCall: + """ + Replaces certain characters with encoded equivalents for use in constructing URLs or URIs. 
Does not encode the following characters: ``-_.~`` + """ + return FunctionCall('ENCODE_URL_COMPONENT', component_string) + + +def ERROR() -> FunctionCall: + """ + Returns a generic Error value (``#ERROR!``). + """ + return FunctionCall('ERROR') + + +def EVEN(value: FunctionArg, /) -> FunctionCall: + """ + Returns the smallest even integer that is greater than or equal to the specified value. + """ + return FunctionCall('EVEN', value) + + +def EXP(power: FunctionArg, /) -> FunctionCall: + """ + Computes **Euler's number** (e) to the specified power. + """ + return FunctionCall('EXP', power) + + +def FALSE() -> FunctionCall: + """ + Logical value false. False is represented numerically by a 0. + """ + return FunctionCall('FALSE') + + +def FIND(string_to_find: FunctionArg, where_to_search: FunctionArg, start_from_position: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Finds an occurrence of stringToFind in whereToSearch string starting from an optional startFromPosition.(startFromPosition is 0 by default.) If no occurrence of stringToFind is found, the result will be 0. + """ + return FunctionCall('FIND', string_to_find, where_to_search, *(v for v in [start_from_position] if v is not None)) + + +def FLOOR(value: FunctionArg, significance: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Returns the nearest integer multiple of significance that is less than or equal to the value. If no significance is provided, a significance of 1 is assumed. + """ + return FunctionCall('FLOOR', value, *(v for v in [significance] if v is not None)) + + +def FROMNOW(date: FunctionArg, /) -> FunctionCall: + """ + Calculates the number of days between the current date and another date. + """ + return FunctionCall('FROMNOW', date) + + +def HOUR(datetime: FunctionArg, /) -> FunctionCall: + """ + Returns the hour of a datetime as a number between 0 (12:00am) and 23 (11:00pm). 
+ """ + return FunctionCall('HOUR', datetime) + + +def IF(expression: FunctionArg, if_true: FunctionArg, if_false: FunctionArg, /) -> FunctionCall: + """ + Returns value1 if the logical argument is true, otherwise it returns value2. Can also be used to make `nested IF statements `__. + """ + return FunctionCall('IF', expression, if_true, if_false) + + +def INT(value: FunctionArg, /) -> FunctionCall: + """ + Returns the greatest integer that is less than or equal to the specified value. + """ + return FunctionCall('INT', value) + + +def ISERROR(expr: FunctionArg, /) -> FunctionCall: + """ + Returns true if the expression causes an error. + """ + return FunctionCall('ISERROR', expr) + + +def IS_AFTER(date1: FunctionArg, date2: FunctionArg, /) -> FunctionCall: + """ + Determines if [date1] is later than [date2]. Returns 1 if yes, 0 if no. + """ + return FunctionCall('IS_AFTER', date1, date2) + + +def IS_BEFORE(date1: FunctionArg, date2: FunctionArg, /) -> FunctionCall: + """ + Determines if [date1] is earlier than [date2]. Returns 1 if yes, 0 if no. + """ + return FunctionCall('IS_BEFORE', date1, date2) + + +def IS_SAME(date1: FunctionArg, date2: FunctionArg, unit: FunctionArg, /) -> FunctionCall: + """ + Compares two dates up to a unit and determines whether they are identical. Returns 1 if yes, 0 if no. + """ + return FunctionCall('IS_SAME', date1, date2, unit) + + +def LAST_MODIFIED_TIME(*fields: FunctionArg) -> FunctionCall: + """ + Returns the date and time of the most recent modification made by a user in a non-computed field in the table. + """ + return FunctionCall('LAST_MODIFIED_TIME', *fields) + + +def LEFT(string: FunctionArg, how_many: FunctionArg, /) -> FunctionCall: + """ + Extract how many characters from the beginning of the string. + """ + return FunctionCall('LEFT', string, how_many) + + +def LEN(string: FunctionArg, /) -> FunctionCall: + """ + Returns the length of a string. 
+ """ + return FunctionCall('LEN', string) + + +def LOG(number: FunctionArg, base: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Computes the logarithm of the value in provided base. The base defaults to 10 if not specified. + """ + return FunctionCall('LOG', number, *(v for v in [base] if v is not None)) + + +def LOWER(string: FunctionArg, /) -> FunctionCall: + """ + Makes a string lowercase. + """ + return FunctionCall('LOWER', string) + + +def MAX(number: FunctionArg, /, *numbers: FunctionArg) -> FunctionCall: + """ + Returns the largest of the given numbers. + """ + return FunctionCall('MAX', number, *numbers) + + +def MID(string: FunctionArg, where_to_start: FunctionArg, count: FunctionArg, /) -> FunctionCall: + """ + Extract a substring of count characters starting at whereToStart. + """ + return FunctionCall('MID', string, where_to_start, count) + + +def MIN(number: FunctionArg, /, *numbers: FunctionArg) -> FunctionCall: + """ + Returns the smallest of the given numbers. + """ + return FunctionCall('MIN', number, *numbers) + + +def MINUTE(datetime: FunctionArg, /) -> FunctionCall: + """ + Returns the minute of a datetime as an integer between 0 and 59. + """ + return FunctionCall('MINUTE', datetime) + + +def MOD(value: FunctionArg, divisor: FunctionArg, /) -> FunctionCall: + """ + Returns the remainder after dividing the first argument by the second. + """ + return FunctionCall('MOD', value, divisor) + + +def MONTH(date: FunctionArg, /) -> FunctionCall: + """ + Returns the month of a datetime as a number between 1 (January) and 12 (December). + """ + return FunctionCall('MONTH', date) + + +def NOW() -> FunctionCall: + """ + While similar to the TODAY() function, NOW() returns the current date AND time. + """ + return FunctionCall('NOW') + + +def ODD(value: FunctionArg, /) -> FunctionCall: + """ + Rounds positive value up the the nearest odd number and negative value down to the nearest odd number. 
+ """ + return FunctionCall('ODD', value) + + +def POWER(base: FunctionArg, power: FunctionArg, /) -> FunctionCall: + """ + Computes the specified base to the specified power. + """ + return FunctionCall('POWER', base, power) + + +def RECORD_ID() -> FunctionCall: + """ + Returns the ID of the current record. + """ + return FunctionCall('RECORD_ID') + + +def REGEX_EXTRACT(string: FunctionArg, regex: FunctionArg, /) -> FunctionCall: + """ + Returns the first substring that matches a regular expression. + """ + return FunctionCall('REGEX_EXTRACT', string, regex) + + +def REGEX_MATCH(string: FunctionArg, regex: FunctionArg, /) -> FunctionCall: + """ + Returns whether the input text matches a regular expression. + """ + return FunctionCall('REGEX_MATCH', string, regex) + + +def REGEX_REPLACE(string: FunctionArg, regex: FunctionArg, replacement: FunctionArg, /) -> FunctionCall: + """ + Substitutes all matching substrings with a replacement string value. + """ + return FunctionCall('REGEX_REPLACE', string, regex, replacement) + + +def REPLACE(string: FunctionArg, start_character: FunctionArg, number_of_characters: FunctionArg, replacement: FunctionArg, /) -> FunctionCall: + """ + Replaces the number of characters beginning with the start character with the replacement text. + """ + return FunctionCall('REPLACE', string, start_character, number_of_characters, replacement) + + +def REPT(string: FunctionArg, number: FunctionArg, /) -> FunctionCall: + """ + Repeats string by the specified number of times. + """ + return FunctionCall('REPT', string, number) + + +def RIGHT(string: FunctionArg, how_many: FunctionArg, /) -> FunctionCall: + """ + Extract howMany characters from the end of the string. + """ + return FunctionCall('RIGHT', string, how_many) + + +def ROUND(value: FunctionArg, precision: FunctionArg, /) -> FunctionCall: + """ + Rounds the value to the number of decimal places given by "precision." 
(Specifically, ROUND will round to the nearest integer at the specified precision, with ties broken by `rounding half up toward positive infinity `__.) + """ + return FunctionCall('ROUND', value, precision) + + +def ROUNDDOWN(value: FunctionArg, precision: FunctionArg, /) -> FunctionCall: + """ + Rounds the value to the number of decimal places given by "precision," always `rounding down `__. + """ + return FunctionCall('ROUNDDOWN', value, precision) + + +def ROUNDUP(value: FunctionArg, precision: FunctionArg, /) -> FunctionCall: + """ + Rounds the value to the number of decimal places given by "precision," always `rounding up `__. + """ + return FunctionCall('ROUNDUP', value, precision) + + +def SEARCH(string_to_find: FunctionArg, where_to_search: FunctionArg, start_from_position: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Searches for an occurrence of stringToFind in whereToSearch string starting from an optional startFromPosition. (startFromPosition is 0 by default.) If no occurrence of stringToFind is found, the result will be empty. + """ + return FunctionCall('SEARCH', string_to_find, where_to_search, *(v for v in [start_from_position] if v is not None)) -def AND(*args: str) -> str: +def SECOND(datetime: FunctionArg, /) -> FunctionCall: """ - Creates an AND Statement + Returns the second of a datetime as an integer between 0 and 59. + """ + return FunctionCall('SECOND', datetime) + + +def SET_LOCALE(date: FunctionArg, locale_modifier: FunctionArg, /) -> FunctionCall: + """ + Sets a specific locale for a datetime. **Must be used in conjunction with DATETIME_FORMAT.** A list of supported locale modifiers can be found `here `__. + """ + return FunctionCall('SET_LOCALE', date, locale_modifier) + + +def SET_TIMEZONE(date: FunctionArg, tz_identifier: FunctionArg, /) -> FunctionCall: + """ + Sets a specific timezone for a datetime. **Must be used in conjunction with DATETIME_FORMAT.** A list of supported timezone identifiers can be found `here `__. 
+ """ + return FunctionCall('SET_TIMEZONE', date, tz_identifier) - >>> AND(1, 2, 3) - 'AND(1, 2, 3)' + +def SQRT(value: FunctionArg, /) -> FunctionCall: + """ + Returns the square root of a nonnegative number. """ - return "AND({})".format(",".join(args)) + return FunctionCall('SQRT', value) -def OR(*args: str) -> str: +def SUBSTITUTE(string: FunctionArg, old_text: FunctionArg, new_text: FunctionArg, index: Optional[FunctionArg] = None, /) -> FunctionCall: """ - .. versionadded:: 1.2.0 + Replaces occurrences of old_text in string with new_text. + """ + return FunctionCall('SUBSTITUTE', string, old_text, new_text, *(v for v in [index] if v is not None)) - Creates an OR Statement - >>> OR(1, 2, 3) - 'OR(1, 2, 3)' +def SUM(number: FunctionArg, /, *numbers: FunctionArg) -> FunctionCall: + """ + Sum together the numbers. Equivalent to number1 + number2 + ... """ - return "OR({})".format(",".join(args)) + return FunctionCall('SUM', number, *numbers) -def LOWER(value: str) -> str: +def SWITCH(expression: FunctionArg, pattern: FunctionArg, result: FunctionArg, /, *pattern_results: FunctionArg) -> FunctionCall: + """ + Takes an expression, a list of possible values for that expression, and for each one, a value that the expression should take in that case. It can also take a default value if the expression input doesn't match any of the defined patterns. In many cases, SWITCH() can be used instead `of a nested IF() formula `__. """ - .. versionadded:: 1.3.0 + return FunctionCall('SWITCH', expression, pattern, result, *pattern_results) - Creates the LOWER function, making a string lowercase. - Can be used on a string or a field name and will lower all the strings in the field. - >>> LOWER("TestValue") - "LOWER(TestValue)" +def T(value: FunctionArg, /) -> FunctionCall: """ - return "LOWER({})".format(value) + Returns the argument if it is text and blank otherwise. 
+ """ + return FunctionCall('T', value) + + +def TIMESTR(timestamp: FunctionArg, /) -> FunctionCall: + """ + Formats a datetime into a time-only string (HH:mm:ss). + """ + return FunctionCall('TIMESTR', timestamp) + + +def TODAY() -> FunctionCall: + """ + While similar to the NOW() function: TODAY() returns the current date (not the current time, if formatted, time will return 12:00am). + """ + return FunctionCall('TODAY') + + +def TONOW(date: FunctionArg, /) -> FunctionCall: + """ + Calculates the number of days between the current date and another date. + """ + return FunctionCall('TONOW', date) + + +def TRIM(string: FunctionArg, /) -> FunctionCall: + """ + Removes whitespace at the beginning and end of string. + """ + return FunctionCall('TRIM', string) + + +def TRUE() -> FunctionCall: + """ + Logical value true. The value of true is represented numerically by a 1. + """ + return FunctionCall('TRUE') + + +def UPPER(string: FunctionArg, /) -> FunctionCall: + """ + Makes string uppercase. + """ + return FunctionCall('UPPER', string) + + +def VALUE(text: FunctionArg, /) -> FunctionCall: + """ + Converts the text string to a number. Some exceptions apply—if the string contains certain mathematical operators(-,%) the result may not return as expected. In these scenarios we recommend using a combination of VALUE and REGEX_REPLACE to remove non-digit values from the string: + """ + return FunctionCall('VALUE', text) + + +def WEEKDAY(date: FunctionArg, start_day_of_week: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Returns the day of the week as an integer between 0 (Sunday) and 6 (Saturday). You may optionally provide a second argument (either ``"Sunday"`` or ``"Monday"``) to start weeks on that day. If omitted, weeks start on Sunday by default. 
+ """ + return FunctionCall('WEEKDAY', date, *(v for v in [start_day_of_week] if v is not None)) + + +def WEEKNUM(date: FunctionArg, start_day_of_week: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Returns the week number in a year. You may optionally provide a second argument (either ``"Sunday"`` or ``"Monday"``) to start weeks on that day. If omitted, weeks start on Sunday by default. + """ + return FunctionCall('WEEKNUM', date, *(v for v in [start_day_of_week] if v is not None)) + + +def WORKDAY(start_date: FunctionArg, num_days: FunctionArg, holidays: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Returns a date that is numDays working days after startDate. Working days exclude weekends and an optional list of holidays, formatted as a comma-separated string of ISO-formatted dates. + """ + return FunctionCall('WORKDAY', start_date, num_days, *(v for v in [holidays] if v is not None)) + + +def WORKDAY_DIFF(start_date: FunctionArg, end_date: FunctionArg, holidays: Optional[FunctionArg] = None, /) -> FunctionCall: + """ + Counts the number of working days between startDate and endDate. Working days exclude weekends and an optional list of holidays, formatted as a comma-separated string of ISO-formatted dates. + """ + return FunctionCall('WORKDAY_DIFF', start_date, end_date, *(v for v in [holidays] if v is not None)) + + +def XOR(expression: FunctionArg, /, *expressions: FunctionArg) -> FunctionCall: + """ + Returns true if an **odd** number of arguments are true. + """ + return FunctionCall('XOR', expression, *expressions) + + +def YEAR(date: FunctionArg, /) -> FunctionCall: + """ + Returns the four-digit year of a datetime. 
+ """ + return FunctionCall('YEAR', date) + + +# [[[end]]] (sum: 6J+3KYcsIL) +# fmt: on diff --git a/pyairtable/formulas.txt b/pyairtable/formulas.txt new file mode 100644 index 00000000..5308c048 --- /dev/null +++ b/pyairtable/formulas.txt @@ -0,0 +1,82 @@ +# Retrieved from https://www.airtable.com/universe/expHF9XTWWwAT299z +# and then edited by hand for consistency and correctness. + +ABS(value) # Returns the absolute value. +AND(expression, [expressions...]) # Returns true if all the arguments are true, returns false otherwise. +AVERAGE(number, [numbers...]) # Returns the average of the numbers. +BLANK() # Returns a blank value. +CEILING(value, [significance]) # Returns the nearest integer multiple of significance that is greater than or equal to the value. If no significance is provided, a significance of 1 is assumed. +CONCATENATE(text, [texts...]) # Joins together the text arguments into a single text value. +COUNT(number, [numbers...]) # Count the number of numeric items. +COUNTA(value, [values...]) # Count the number of non-empty values. This function counts both numeric and text values. +COUNTALL(value, [values...]) # Count the number of all elements including text and blanks. +CREATED_TIME() # Returns the date and time a given record was created. +DATEADD(date, number, units) # Adds specified "count" units to a datetime. (See `list of shared unit specifiers `__. For this function we recommend using the full unit specifier for your desired unit.) +DATESTR(date) # Formats a datetime into a string (YYYY-MM-DD). +DATETIME_DIFF(date1, date2, units) # Returns the difference between datetimes in specified units. The difference between datetimes is determined by subtracting [date2] from [date1]. This means that if [date2] is later than [date1], the resulting value will be negative. +DATETIME_FORMAT(date, [output_format]) # Formats a datetime into a specified string. 
See an `explanation of how to use this function with date fields `__ or a list of `supported format specifiers `__. +DATETIME_PARSE(date, [input_format], [locale]) # Interprets a text string as a structured date, with optional input format and locale parameters. The output format will always be formatted 'M/D/YYYY h:mm a'. +DAY(date) # Returns the day of the month of a datetime in the form of a number between 1-31. +ENCODE_URL_COMPONENT(component_string) # Replaces certain characters with encoded equivalents for use in constructing URLs or URIs. Does not encode the following characters: ``-_.~`` +ERROR() # Returns a generic Error value (``#ERROR!``). +EVEN(value) # Returns the smallest even integer that is greater than or equal to the specified value. +EXP(power) # Computes **Euler's number** (e) to the specified power. +FALSE() # Logical value false. False is represented numerically by a 0. +FIND(stringToFind, whereToSearch, [startFromPosition]) # Finds an occurrence of stringToFind in whereToSearch string starting from an optional startFromPosition. (startFromPosition is 0 by default.) If no occurrence of stringToFind is found, the result will be 0. +FLOOR(value, [significance]) # Returns the nearest integer multiple of significance that is less than or equal to the value. If no significance is provided, a significance of 1 is assumed. +FROMNOW(date) # Calculates the number of days between the current date and another date. +HOUR(datetime) # Returns the hour of a datetime as a number between 0 (12:00am) and 23 (11:00pm). +IF(expression, if_true, if_false) # Returns value1 if the logical argument is true, otherwise it returns value2. Can also be used to make `nested IF statements `__. +INT(value) # Returns the greatest integer that is less than or equal to the specified value. +ISERROR(expr) # Returns true if the expression causes an error. +IS_AFTER(date1, date2) # Determines if [date1] is later than [date2]. Returns 1 if yes, 0 if no. 
+IS_BEFORE(date1, date2) # Determines if [date1] is earlier than [date2]. Returns 1 if yes, 0 if no. +IS_SAME(date1, date2, unit) # Compares two dates up to a unit and determines whether they are identical. Returns 1 if yes, 0 if no. +LAST_MODIFIED_TIME([fields...]) # Returns the date and time of the most recent modification made by a user in a non-computed field in the table. +LEFT(string, howMany) # Extract howMany characters from the beginning of the string. +LEN(string) # Returns the length of a string. +LOG(number, [base]) # Computes the logarithm of the value in provided base. The base defaults to 10 if not specified. +LOWER(string) # Makes a string lowercase. +MAX(number, [numbers...]) # Returns the largest of the given numbers. +MID(string, whereToStart, count) # Extract a substring of count characters starting at whereToStart. +MIN(number, [numbers...]) # Returns the smallest of the given numbers. +MINUTE(datetime) # Returns the minute of a datetime as an integer between 0 and 59. +MOD(value, divisor) # Returns the remainder after dividing the first argument by the second. +MONTH(date) # Returns the month of a datetime as a number between 1 (January) and 12 (December). +NOT(expression) # Reverses the logical value of its argument. +NOW() # While similar to the TODAY() function, NOW() returns the current date AND time. +ODD(value) # Rounds positive value up to the nearest odd number and negative value down to the nearest odd number. +OR(expression, [expressions...]) # Returns true if any one of the arguments is true. +POWER(base, power) # Computes the specified base to the specified power. +RECORD_ID() # Returns the ID of the current record. +REGEX_EXTRACT(string, regex) # Returns the first substring that matches a regular expression. +REGEX_MATCH(string, regex) # Returns whether the input text matches a regular expression. +REGEX_REPLACE(string, regex, replacement) # Substitutes all matching substrings with a replacement string value. 
+REPLACE(string, start_character, number_of_characters, replacement) # Replaces the number of characters beginning with the start character with the replacement text. +REPT(string, number) # Repeats string by the specified number of times. +RIGHT(string, howMany) # Extract howMany characters from the end of the string. +ROUND(value, precision) # Rounds the value to the number of decimal places given by "precision." (Specifically, ROUND will round to the nearest integer at the specified precision, with ties broken by `rounding half up toward positive infinity `__.) +ROUNDDOWN(value, precision) # Rounds the value to the number of decimal places given by "precision," always `rounding down `__. +ROUNDUP(value, precision) # Rounds the value to the number of decimal places given by "precision," always `rounding up `__. +SEARCH(stringToFind, whereToSearch, [startFromPosition]) # Searches for an occurrence of stringToFind in whereToSearch string starting from an optional startFromPosition. (startFromPosition is 0 by default.) If no occurrence of stringToFind is found, the result will be empty. +SECOND(datetime) # Returns the second of a datetime as an integer between 0 and 59. +SET_LOCALE(date, locale_modifier) # Sets a specific locale for a datetime. **Must be used in conjunction with DATETIME_FORMAT.** A list of supported locale modifiers can be found `here `__. +SET_TIMEZONE(date, tz_identifier) # Sets a specific timezone for a datetime. **Must be used in conjunction with DATETIME_FORMAT.** A list of supported timezone identifiers can be found `here `__. +SQRT(value) # Returns the square root of a nonnegative number. +SUBSTITUTE(string, old_text, new_text, [index]) # Replaces occurrences of old_text in string with new_text. +SUM(number, [numbers...]) # Sum together the numbers. Equivalent to number1 + number2 + ... 
+SWITCH(expression, pattern, result, [pattern_results...]) # Takes an expression, a list of possible values for that expression, and for each one, a value that the expression should take in that case. It can also take a default value if the expression input doesn't match any of the defined patterns. In many cases, SWITCH() can be used instead `of a nested IF() formula `__. +T(value) # Returns the argument if it is text and blank otherwise. +TIMESTR(timestamp) # Formats a datetime into a time-only string (HH:mm:ss). +TODAY() # While similar to the NOW() function: TODAY() returns the current date (not the current time, if formatted, time will return 12:00am). +TONOW(date) # Calculates the number of days between the current date and another date. +TRIM(string) # Removes whitespace at the beginning and end of string. +TRUE() # Logical value true. The value of true is represented numerically by a 1. +UPPER(string) # Makes string uppercase. +VALUE(text) # Converts the text string to a number. Some exceptions apply—if the string contains certain mathematical operators(-,%) the result may not return as expected. In these scenarios we recommend using a combination of VALUE and REGEX_REPLACE to remove non-digit values from the string: +WEEKDAY(date, [startDayOfWeek]) # Returns the day of the week as an integer between 0 (Sunday) and 6 (Saturday). You may optionally provide a second argument (either ``"Sunday"`` or ``"Monday"``) to start weeks on that day. If omitted, weeks start on Sunday by default. +WEEKNUM(date, [startDayOfWeek]) # Returns the week number in a year. You may optionally provide a second argument (either ``"Sunday"`` or ``"Monday"``) to start weeks on that day. If omitted, weeks start on Sunday by default. +WORKDAY(startDate, numDays, [holidays]) # Returns a date that is numDays working days after startDate. Working days exclude weekends and an optional list of holidays, formatted as a comma-separated string of ISO-formatted dates. 
+WORKDAY_DIFF(startDate, endDate, [holidays]) # Counts the number of working days between startDate and endDate. Working days exclude weekends and an optional list of holidays, formatted as a comma-separated string of ISO-formatted dates. +XOR(expression, [expressions...]) # Returns true if an **odd** number of arguments are true. +YEAR(date) # Returns the four-digit year of a datetime. diff --git a/pyairtable/metadata.py b/pyairtable/metadata.py deleted file mode 100644 index 5cf815cf..00000000 --- a/pyairtable/metadata.py +++ /dev/null @@ -1,126 +0,0 @@ -from typing import Any, Dict, Optional, Union - -from pyairtable.api import Api, Base, Table - - -def get_api_bases(api: Union[Api, Base]) -> Dict[Any, Any]: - """ - Return list of Bases from an Api or Base instance. - For More Details `Metadata Api Documentation `_ - - Args: - api: :class:`Api` or :class:`Base` instance - - Usage: - >>> table.get_bases() - { - "bases": [ - { - "id": "appY3WxIBCdKPDdIa", - "name": "Apartment Hunting", - "permissionLevel": "create" - }, - { - "id": "appSW9R5uCNmRmfl6", - "name": "Project Tracker", - "permissionLevel": "edit" - } - ] - } - """ - api = api.api if isinstance(api, Base) else api - base_list_url = api.build_url("meta", "bases") - return { - "bases": [ - base - for page in api.iterate_requests("get", base_list_url) - for base in page.get("bases", []) - ] - } - - -def get_base_schema(base: Union[Base, Table]) -> Dict[Any, Any]: - """ - Returns Schema of a Base - For More Details `Metadata Api Documentation `_ - - Args: - base: :class:`Base` or :class:`Table` instance - - Usage: - >>> get_base_schema(base) - { - "tables": [ - { - "id": "tbltp8DGLhqbUmjK1", - "name": "Apartments", - "primaryFieldId": "fld1VnoyuotSTyxW1", - "fields": [ - { - "id": "fld1VnoyuotSTyxW1", - "name": "Name", - "type": "singleLineText" - }, - { - "id": "fldoaIqdn5szURHpw", - "name": "Pictures", - "type": "multipleAttachment" - }, - { - "id": "fldumZe00w09RYTW6", - "name": "District", - "type": 
"multipleRecordLinks" - } - ], - "views": [ - { - "id": "viwQpsuEDqHFqegkp", - "name": "Grid view", - "type": "grid" - } - ] - } - ] - } - """ - base = base.base if isinstance(base, Table) else base - base_schema_url = base.api.build_url("meta", "bases", base.id, "tables") - assert isinstance(response := base.api.request("get", base_schema_url), dict) - return response - - -def get_table_schema(table: Table) -> Optional[Dict[Any, Any]]: - """ - Returns the specific table schema record provided by base schema list - - Args: - table: :class:`Table` instance - - Usage: - >>> get_table_schema(table) - { - "id": "tbltp8DGLhqbUmjK1", - "name": "Apartments", - "primaryFieldId": "fld1VnoyuotSTyxW1", - "fields": [ - { - "id": "fld1VnoyuotSTyxW1", - "name": "Name", - "type": "singleLineText" - } - ], - "views": [ - { - "id": "viwQpsuEDqHFqegkp", - "name": "Grid view", - "type": "grid" - } - ] - } - """ - base_schema = get_base_schema(table) - for table_record in base_schema.get("tables", {}): - assert isinstance(table_record, dict) - if table.name == table_record["name"]: - return table_record - return None diff --git a/pyairtable/models/__init__.py b/pyairtable/models/__init__.py index 0ddaa611..ebf6a764 100644 --- a/pyairtable/models/__init__.py +++ b/pyairtable/models/__init__.py @@ -6,13 +6,20 @@ pyAirtable will wrap certain API responses in type-annotated models, some of which will be deeply nested within each other. Models which implementers can interact with directly are documented below. +Nested or internal models are documented in each submodule. + +Due to its complexity, the :mod:`pyairtable.models.schema` module is +documented separately, and none of its classes are exposed here. 
""" -from .collaborator import Collaborator -from .comment import Comment -from .webhook import Webhook, WebhookNotification, WebhookPayload +from pyairtable.models.audit import AuditLogEvent, AuditLogResponse +from pyairtable.models.collaborator import Collaborator +from pyairtable.models.comment import Comment +from pyairtable.models.webhook import Webhook, WebhookNotification, WebhookPayload __all__ = [ + "AuditLogResponse", + "AuditLogEvent", "Collaborator", "Comment", "Webhook", diff --git a/pyairtable/models/_base.py b/pyairtable/models/_base.py index 8bf3ff8f..a10e77b0 100644 --- a/pyairtable/models/_base.py +++ b/pyairtable/models/_base.py @@ -1,10 +1,30 @@ +from datetime import datetime from functools import partial -from typing import Any, ClassVar, Iterable, Mapping, Optional, Set, Type, Union +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Iterable, + Mapping, + Optional, + Set, + Type, + Union, +) import inflection +import pydantic from typing_extensions import Self as SelfType -from pyairtable._compat import pydantic +from pyairtable.utils import ( + _append_docstring_text, + datetime_from_iso_str, + datetime_to_iso_str, +) + +if TYPE_CHECKING: + from pyairtable.api.api import Api class AirtableModel(pydantic.BaseModel): @@ -12,113 +32,252 @@ class AirtableModel(pydantic.BaseModel): Base model for any data structures that will be loaded from the Airtable API. """ - class Config: - # Ignore field names we don't recognize, so applications don't crash - # if Airtable decides to add new attributes. - extra = "ignore" + model_config = pydantic.ConfigDict( + extra="ignore", + alias_generator=partial(inflection.camelize, uppercase_first_letter=False), + populate_by_name=True, + ) + + _raw: Dict[str, Any] = pydantic.PrivateAttr() - # Convert e.g. 
"base_invite_links" to "baseInviteLinks" for (de)serialization - alias_generator = partial(inflection.camelize, uppercase_first_letter=False) + def __init__(self, **data: Any) -> None: + raw = data.copy() - # Allow both base_invite_links= and baseInviteLinks= in constructor - allow_population_by_field_name = True + # Convert JSON-serializable input data to the types expected by our model. + # For now this only converts ISO 8601 strings to datetime objects. + for field_name, field_model in self.__class__.model_fields.items(): + for name in {field_name, field_model.alias}: + if not name or not (value := data.get(name)): + continue + if isinstance(value, str) and field_model.annotation is datetime: + data[name] = datetime_from_iso_str(value) - # We'll assume this in a couple different places - underscore_attrs_are_private = True + super().__init__(**data) - _raw: Any = pydantic.PrivateAttr() + self._raw = raw # must happen *after* __init__ @classmethod - def parse_obj(cls, obj: Any) -> SelfType: - instance = super().parse_obj(obj) - instance._raw = obj + def from_api( + cls, + obj: Dict[str, Any], + api: "Api", + *, + context: Optional[Any] = None, + ) -> SelfType: + """ + Construct an instance which is able to update itself using an + :class:`~pyairtable.Api`. + + Args: + obj: The JSON data structure used to construct the instance. + Will be passed to `parse_obj `_. + api: The connection to use for saving updates. + context: An object, sequence of objects, or mapping of names to objects + which will be used as arguments to ``str.format()`` when constructing + the URL for a :class:`~pyairtable.models._base.RestfulModel`. 
+ """ + instance = cls(**obj) + cascade_api(instance, api, context=context) return instance -class SerializableModel(AirtableModel): +def _context_name(obj: Any) -> str: + return inflection.underscore(type(obj).__name__) + + +def cascade_api( + obj: Any, + api: "Api", + *, + context: Optional[Any] = None, +) -> None: + """ + Ensure all nested objects have access to the given Api instance, + and trigger them to configure their URLs accordingly. + + Args: + api: The instance of the API to set. + context: A mapping of class names to instances of that class. """ - Base model for any data structures that can be saved back to the API. + if context is None: + context = {} + # context=Foo() is short for context={"foo": Foo()} + if context and not isinstance(context, dict): + context = {_context_name(context): context} + + # Ensure we don't get stuck in infinite loops + visited: Set[int] = context.setdefault("__visited__", set()) + if id(obj) in visited: + return + visited.add(id(obj)) + + # Iterate over containers and cascade API context down to contained models. + if isinstance(obj, (list, tuple, set)): + for value in obj: + cascade_api(value, api, context=context) + if isinstance(obj, dict): + for key, value in obj.items(): + cascade_api(value, api, context={**context, "key": key}) + if not isinstance(obj, AirtableModel): + return + + # If we get this far, we're dealing with a model, so add it to the context. + # If it's a ModelNamedThis, the key will be model_named_this. + context = {**context, _context_name(obj): obj} + + if isinstance(obj, RestfulModel): + # This is what we came here for; set the API and URL on the RESTful model. + obj._set_api(api, context=context) + + # Find and apply API/context to nested models in every Pydantic field. 
+ for field_name in type(obj).model_fields: + if field_value := getattr(obj, field_name, None): + cascade_api(field_value, api, context=context) + + +class RestfulModel(AirtableModel): + """ + Base model for any data structures that wrap around a REST API endpoint. Subclasses can pass a number of keyword arguments to control serialization behavior: - * ``writable=``: field names that should be written to API on ``save()``. - * ``readonly=``: field names that should not be written to API on ``save()``. - * ``allow_update=``: boolean indicating whether to allow ``save()`` (default: true) - * ``allow_delete=``: boolean indicating whether to allow ``delete()`` (default: true) + * ``url=``: format string for building the URL to be used when saving changes to this model. """ - __writable: ClassVar[Optional[Iterable[str]]] - __readonly: ClassVar[Optional[Iterable[str]]] - __allow_update: ClassVar[bool] - __allow_delete: ClassVar[bool] + __url_pattern: ClassVar[str] = "" - def __init_subclass__(cls, **kwargs: Any) -> None: - # These are private to SerializableModel - if "writable" in kwargs and "readonly" in kwargs: - raise ValueError("incompatible kwargs 'writable' and 'readonly'") - cls.__writable = kwargs.get("writable") - cls.__readonly = kwargs.get("readonly") - cls.__allow_update = bool(kwargs.get("allow_update", True)) - cls.__allow_delete = bool(kwargs.get("allow_delete", True)) + _api: "Api" = pydantic.PrivateAttr() + _url: str = pydantic.PrivateAttr(default="") + _url_context: Any = pydantic.PrivateAttr(default=None) - _api: "pyairtable.api.api.Api" = pydantic.PrivateAttr() - _url: str = pydantic.PrivateAttr() - _deleted: bool = pydantic.PrivateAttr(default=False) + def __init_subclass__(cls, **kwargs: Any) -> None: + cls.__url_pattern = kwargs.pop("url", cls.__url_pattern) + super().__init_subclass__() - @classmethod - def from_api(cls, api: "pyairtable.api.api.Api", url: str, obj: Any) -> SelfType: + def _set_api(self, api: "Api", context: Dict[str, Any]) -> 
None: """ - Constructs an instance which is able to update itself using an - :class:`~pyairtable.Api`. - - Args: - api: The connection to use for saving updates. - url: The URL which can receive PATCH or DELETE requests for this object. - obj: The JSON data structure used to construct the instance. - Will be passed to `parse_obj `_. + Set a link to the API and build the REST URL used for this resource. """ - parsed = cls.parse_obj(obj) - parsed._api = api - parsed._url = url - return parsed - - def save(self) -> None: + self._api = api + self._url_context = context + try: + self._url = self.__url_pattern.format(**context, self=self) + except (KeyError, AttributeError) as exc: + exc.args = ( + *exc.args, + {k: v for (k, v) in context.items() if k != "__visited__"}, + ) + raise + if self._url and not self._url.startswith("http"): + self._url = api.build_url(self._url) + + def _reload(self, obj: Optional[Dict[str, Any]] = None) -> None: + """ + Reload the model's contents from the given object, or by making a GET request to the API. """ - Save any changes made to the instance's writable fields. + if obj is None: + obj = self._api.get(self._url) + copyable = type(self).from_api(obj, self._api, context=self._url_context) + self.__dict__.update( + {key: copyable.__dict__.get(key) for key in type(self).model_fields} + ) - Will raise ``RuntimeError`` if the record has been deleted. + +class CanDeleteModel(RestfulModel): + """ + Mix-in for RestfulModel that allows a model to be deleted. 
+ """ + + _deleted: bool = pydantic.PrivateAttr(default=False) + + @property + def deleted(self) -> bool: """ - if not self.__allow_update: - raise NotImplementedError(f"{self.__class__.__name__}.save() not allowed") - if self._deleted: - raise RuntimeError("save() called after delete()") - include = set(self.__writable) if self.__writable else None - exclude = set(self.__readonly) if self.__readonly else None - data = self.dict(by_alias=True, include=include, exclude=exclude) - response = self._api.request("PATCH", self._url, json=data) - copyable = self.parse_obj(response) - self.__dict__.update(copyable.__dict__) + Indicates whether the record has been deleted since being returned from the API. + """ + return self._deleted def delete(self) -> None: """ - Delete the record on the server and marks this instance as deleted. + Delete the record on the server and mark this instance as deleted. """ - if not self.__allow_delete: - raise NotImplementedError(f"{self.__class__.__name__}.delete() not allowed") + if not self._url: + raise RuntimeError("delete() called with no URL specified") self._api.request("DELETE", self._url) self._deleted = True - @property - def deleted(self) -> bool: + +class CanUpdateModel(RestfulModel): + """ + Mix-in for RestfulModel that allows a model to be modified and saved. + + Subclasses can pass a number of keyword arguments to control serialization behavior: + + * ``writable=``: field names that should be written to API on ``save()``. + * ``readonly=``: field names that should not be written to API on ``save()``. 
+ * ``save_null_values=``: boolean indicating whether ``save()`` should write nulls (default: true) + """ + + __writable: ClassVar[Optional[Iterable[str]]] = None + __readonly: ClassVar[Optional[Iterable[str]]] = None + __save_none: ClassVar[bool] = True + __save_http_method: ClassVar[str] = "PATCH" + __reload_after_save: ClassVar[bool] = True + + def __init_subclass__(cls, **kwargs: Any) -> None: + if "writable" in kwargs and "readonly" in kwargs: + raise ValueError("incompatible kwargs 'writable' and 'readonly'") + cls.__writable = kwargs.pop("writable", cls.__writable) + cls.__readonly = kwargs.pop("readonly", cls.__readonly) + cls.__save_none = bool(kwargs.pop("save_null_values", cls.__save_none)) + cls.__save_http_method = kwargs.pop("save_method", cls.__save_http_method) + cls.__reload_after_save = bool( + kwargs.pop("reload_after_save", cls.__reload_after_save) + ) + if cls.__writable: + _append_docstring_refs( + cls, + "The following fields can be modified and saved", + cls.__writable, + ) + if cls.__readonly: + _append_docstring_refs( + cls, + "The following fields are read-only and cannot be modified", + cls.__readonly, + ) + super().__init_subclass__(**kwargs) + + def save(self) -> None: """ - Indicates whether the record has been deleted since being returned from the API. + Save any changes made to the instance's writable fields and update the + instance with any refreshed values returned from the API. + + Will raise ``RuntimeError`` if the record has been deleted. 
""" - return self._deleted + if getattr(self, "_deleted", None): + raise RuntimeError("save() called after delete()") + if not self._url: + raise RuntimeError("save() called with no URL specified") + include = set(self.__writable) if self.__writable else None + exclude = set(self.__readonly) if self.__readonly else None + data = self.model_dump( + by_alias=True, + include=include, + exclude=exclude, + exclude_none=(not self.__save_none), + ) + # This undoes the finagling we do in __init__, converting datetime back to str. + for key in data: + if isinstance(value := data.get(key), datetime): + data[key] = datetime_to_iso_str(value) + response = self._api.request(self.__save_http_method, self._url, json=data) + if self.__reload_after_save: + self._reload(response) def __setattr__(self, name: str, value: Any) -> None: # Prevents implementers from changing values on readonly or non-writable fields. - # Mypy can't tell that we are using pydantic v1. - if name in self.__class__.__fields__: # type: ignore[operator, unused-ignore] + if name in self.__class__.model_fields: if self.__readonly and name in self.__readonly: raise AttributeError(name) if self.__writable is not None and name not in self.__writable: @@ -127,7 +286,25 @@ def __setattr__(self, name: str, value: Any) -> None: super().__setattr__(name, value) -def update_forward_refs( +def _append_docstring_refs( + cls: Type[CanUpdateModel], + explanation: str, + field_names: Iterable[str], +) -> None: + """ + Used by CanUpdateModel to append a list of field names to the class docstring. + """ + field_refs = [ + f":attr:`~{cls.__module__}.{cls.__qualname__}.{field}`" for field in field_names + ] + _append_docstring_text( + cls, + f"{explanation}: " + ", ".join(field_refs), + before_re=r"^\s+Usage:", + ) + + +def rebuild_models( obj: Union[Type[AirtableModel], Mapping[str, Any]], memo: Optional[Set[int]] = None, ) -> None: @@ -147,7 +324,6 @@ def update_forward_refs( ... class B_Two(AirtableModel): ... 
>>> update_forward_refs(vars()) """ - # Avoid infinite circular loops memo = set() if memo is None else memo # If it's a type, update its refs, then do the same for any nested classes. # This will raise AttributeError if given a non-AirtableModel type. @@ -155,12 +331,9 @@ def update_forward_refs( if id(obj) in memo: return memo.add(id(obj)) - obj.update_forward_refs() - return update_forward_refs(vars(obj), memo=memo) + obj.model_rebuild() + return rebuild_models(vars(obj), memo=memo) # If it's a mapping, update refs for any AirtableModel instances. for value in obj.values(): if isinstance(value, type) and issubclass(value, AirtableModel): - update_forward_refs(value, memo=memo) - - -import pyairtable.api.api # noqa + rebuild_models(value, memo=memo) diff --git a/pyairtable/models/audit.py b/pyairtable/models/audit.py new file mode 100644 index 00000000..5f1a1c86 --- /dev/null +++ b/pyairtable/models/audit.py @@ -0,0 +1,81 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +import pydantic +from typing_extensions import TypeAlias + +from pyairtable.models._base import AirtableModel, rebuild_models + + +class AuditLogResponse(AirtableModel): + """ + Represents a page of audit log events. + + See `Audit log events `__ + for more information on how to interpret this data structure. + """ + + events: List["AuditLogEvent"] + pagination: Optional["AuditLogResponse.Pagination"] = None + + class Pagination(AirtableModel): + next: Optional[str] = None + previous: Optional[str] = None + + +class AuditLogEvent(AirtableModel): + """ + Represents a single audit log event. + + See `Audit log events `__ + for more information on how to interpret this data structure. + + To avoid namespace conflicts with the Pydantic library, the + ``modelId`` and ``modelType`` fields from the Airtable API are + represented as fields named ``object_id`` and ``object_type``. 
+ """ + + id: str + timestamp: datetime + action: str + actor: "AuditLogActor" + object_id: str = pydantic.Field(alias="modelId") + object_type: str = pydantic.Field(alias="modelType") + payload: "AuditLogPayload" + payload_version: str + context: "AuditLogEvent.Context" + origin: "AuditLogEvent.Origin" + + class Context(AirtableModel): + base_id: Optional[str] = None + action_id: str + enterprise_account_id: str + descendant_enterprise_account_id: Optional[str] = None + interface_id: Optional[str] = None + workspace_id: Optional[str] = None + + class Origin(AirtableModel): + ip_address: str + user_agent: str + oauth_access_token_id: Optional[str] = None + personal_access_token_id: Optional[str] = None + session_id: Optional[str] = None + + +class AuditLogActor(AirtableModel): + type: str + user: Optional["AuditLogActor.UserInfo"] = None + view_id: Optional[str] = None + automation_id: Optional[str] = None + + class UserInfo(AirtableModel): + id: str + email: str + name: Optional[str] = None + + +# Placeholder until we can parse https://airtable.com/developers/web/api/audit-log-event-types +AuditLogPayload: TypeAlias = Dict[str, Any] + + +rebuild_models(vars()) diff --git a/pyairtable/models/collaborator.py b/pyairtable/models/collaborator.py index a55c0332..3e4e9051 100644 --- a/pyairtable/models/collaborator.py +++ b/pyairtable/models/collaborator.py @@ -2,7 +2,7 @@ from typing_extensions import TypeAlias -from ._base import AirtableModel +from pyairtable.models._base import AirtableModel UserId: TypeAlias = str @@ -20,7 +20,7 @@ class Collaborator(AirtableModel): id: UserId #: The email address of the user. - email: Optional[str] + email: Optional[str] = None #: The display name of the user. 
- name: Optional[str] + name: Optional[str] = None diff --git a/pyairtable/models/comment.py b/pyairtable/models/comment.py index 1e7d3389..2f2cc688 100644 --- a/pyairtable/models/comment.py +++ b/pyairtable/models/comment.py @@ -1,10 +1,23 @@ -from typing import Dict, Optional - -from ._base import AirtableModel, SerializableModel, update_forward_refs -from .collaborator import Collaborator - - -class Comment(SerializableModel, writable=["text"]): +from datetime import datetime +from typing import Dict, List, Optional + +import pydantic + +from pyairtable.models._base import ( + AirtableModel, + CanDeleteModel, + CanUpdateModel, + rebuild_models, +) +from pyairtable.models.collaborator import Collaborator + + +class Comment( + CanUpdateModel, + CanDeleteModel, + writable=["text"], + url="{record_url}/comments/{self.id}", +): """ A record comment that has been retrieved from the Airtable API. @@ -14,7 +27,7 @@ class Comment(SerializableModel, writable=["text"]): Comment( id='comdVMNxslc6jG0Xe', text='Hello, @[usrVMNxslc6jG0Xed]!', - created_time='2023-06-07T17:46:24.435891', + created_time=datetime.datetime(...), last_updated_time=None, mentioned={ 'usrVMNxslc6jG0Xed': Mentioned( @@ -43,40 +56,71 @@ class Comment(SerializableModel, writable=["text"]): text: str #: The ISO 8601 timestamp of when the comment was created. - created_time: str + created_time: datetime #: The ISO 8601 timestamp of when the comment was last edited. - last_updated_time: Optional[str] + last_updated_time: Optional[datetime] = None #: The account which created the comment. author: Collaborator #: Users or groups that were mentioned in the text. - mentioned: Optional[Dict[str, "Comment.Mentioned"]] + mentioned: Dict[str, "Mentioned"] = pydantic.Field(default_factory=dict) - class Mentioned(AirtableModel): - """ - A user or group that was mentioned within a comment. - Stored as a ``dict`` that is keyed by ID. 
- - >>> comment = table.add_comment(record_id, "Hello, @[usrVMNxslc6jG0Xed]!") - >>> comment.mentioned - { - "usrVMNxslc6jG0Xed": Mentioned( - display_name='Alice', - email='alice@example.com', - id='usrVMNxslc6jG0Xed', - type='user' - ) - } + #: The comment ID of the parent comment, if this comment is a threaded reply. + parent_comment_id: Optional[str] = None - See `User mentioned `_ for more details. - """ + #: List of reactions to this comment. + reactions: List["Reaction"] = pydantic.Field(default_factory=list) + + +class Mentioned(AirtableModel): + """ + A user or group that was mentioned within a comment. + Stored as a ``dict`` that is keyed by ID. + + >>> comment = table.add_comment(record_id, "Hello, @[usrVMNxslc6jG0Xed]!") + >>> comment.mentioned + { + "usrVMNxslc6jG0Xed": Mentioned( + display_name='Alice', + email='alice@example.com', + id='usrVMNxslc6jG0Xed', + type='user' + ) + } + + See `User mentioned `_ for more details. + """ - id: str - type: str - display_name: str + id: str + type: str + display_name: str + email: Optional[str] = None + + +class Reaction(AirtableModel): + """ + A reaction to a comment. + """ + + class EmojiInfo(AirtableModel): + unicode_character: str + + class ReactingUser(AirtableModel): + user_id: str email: Optional[str] = None + name: Optional[str] = None + + emoji_info: EmojiInfo = pydantic.Field(alias="emoji") + reacting_user: ReactingUser + + @property + def emoji(self) -> str: + """ + The emoji character used for the reaction. 
+ """ + return chr(int(self.emoji_info.unicode_character, 16)) -update_forward_refs(vars()) +rebuild_models(vars()) diff --git a/pyairtable/models/schema.py b/pyairtable/models/schema.py new file mode 100644 index 00000000..b71162df --- /dev/null +++ b/pyairtable/models/schema.py @@ -0,0 +1,1566 @@ +import importlib +from datetime import datetime +from enum import Enum +from functools import partial +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterable, + List, + Literal, + Optional, + TypeVar, + Union, + cast, +) + +import pydantic +from typing_extensions import TypeAlias + +from pyairtable.api.types import AddCollaboratorDict +from pyairtable.models._base import ( + AirtableModel, + CanDeleteModel, + CanUpdateModel, + RestfulModel, + rebuild_models, +) + +if TYPE_CHECKING: + from pyairtable import orm + + +class FieldType(str, Enum): + """ + Enumeration of all field types supported by Airtable. + + Usage: + >>> from pyairtable.models.schema import FieldType + >>> FieldType.SINGLE_LINE_TEXT + FieldType('singleLineText') + """ + + AI_TEXT = "aiText" + AUTO_NUMBER = "autoNumber" + BARCODE = "barcode" + BUTTON = "button" + CHECKBOX = "checkbox" + COUNT = "count" + CREATED_BY = "createdBy" + CREATED_TIME = "createdTime" + CURRENCY = "currency" + DATE = "date" + DATE_TIME = "dateTime" + DURATION = "duration" + EMAIL = "email" + EXTERNAL_SYNC_SOURCE = "externalSyncSource" + FORMULA = "formula" + LAST_MODIFIED_BY = "lastModifiedBy" + LAST_MODIFIED_TIME = "lastModifiedTime" + MANUAL_SORT = "manualSort" + MULTILINE_TEXT = "multilineText" + MULTIPLE_ATTACHMENTS = "multipleAttachments" + MULTIPLE_COLLABORATORS = "multipleCollaborators" + MULTIPLE_LOOKUP_VALUES = "multipleLookupValues" + MULTIPLE_RECORD_LINKS = "multipleRecordLinks" + MULTIPLE_SELECTS = "multipleSelects" + NUMBER = "number" + PERCENT = "percent" + PHONE_NUMBER = "phoneNumber" + RATING = "rating" + RICH_TEXT = "richText" + ROLLUP = "rollup" + SINGLE_COLLABORATOR = "singleCollaborator" + 
SINGLE_LINE_TEXT = "singleLineText" + SINGLE_SELECT = "singleSelect" + URL = "url" + + def __repr__(self) -> str: + return f"FieldType({self.value!r})" + + +FieldSpecifier: TypeAlias = Union[str, "orm.fields.AnyField"] + +_T = TypeVar("_T", bound=Any) +_FL = partial(pydantic.Field, default_factory=list) +_FD = partial(pydantic.Field, default_factory=dict) + + +def _F(classname: str, **kwargs: Any) -> Any: + def _create_default_from_classname() -> Any: + this_module = importlib.import_module(__name__) + obj: Any = this_module + for segment in classname.split("."): + obj = getattr(obj, segment) + return obj() + + kwargs["default_factory"] = _create_default_from_classname + return pydantic.Field(**kwargs) + + +def _find(collection: List[_T], id_or_name: str) -> _T: + """ + For use on a collection model to find objects by either id or name. + """ + items_by_name: Dict[str, _T] = {} + + for item in collection: + if getattr(item, "deleted", None): + continue + if item.id == id_or_name: + return item + items_by_name[item.name] = item + + return items_by_name[id_or_name] + + +class _Collaborators(RestfulModel): + """ + Mixin for use with RestfulModel subclasses that have a /collaborators endpoint. + """ + + def add_user(self, user_id: str, permission_level: str) -> None: + """ + Add a user as a collaborator to this Airtable object. + + Args: + user_id: The user ID. + permission_level: |kwarg_permission_level| + """ + self.add("user", user_id, permission_level) + + def add_group(self, group_id: str, permission_level: str) -> None: + """ + Add a group as a collaborator to this Airtable object. + + Args: + group_id: The group ID. + permission_level: |kwarg_permission_level| + """ + self.add("group", group_id, permission_level) + + def add( + self, + collaborator_type: str, + collaborator_id: str, + permission_level: str, + ) -> None: + """ + Add a user or group as a collaborator to this Airtable object. + + Args: + collaborator_type: Either ``'user'`` or ``'group'``. 
+ collaborator_id: The user or group ID. + permission_level: |kwarg_permission_level| + """ + if collaborator_type not in ("user", "group"): + raise ValueError("collaborator_type must be 'user' or 'group'") + self.add_collaborators( + [ + cast( + AddCollaboratorDict, + { + collaborator_type: {"id": collaborator_id}, + "permissionLevel": permission_level, + }, + ) + ] + ) + + def add_collaborators(self, collaborators: Iterable[AddCollaboratorDict]) -> None: + """ + Add multiple collaborators to this Airtable object. + + Args: + collaborators: A list of ``dict`` that conform to the specification + laid out in the `Add base collaborator `__ + API documentation. + """ + payload = {"collaborators": list(collaborators)} + self._api.post(f"{self._url}/collaborators", json=payload) + self._reload() + + def update(self, collaborator_id: str, permission_level: str) -> None: + """ + Change the permission level granted to a user or group. + + Args: + collaborator_id: The user or group ID. + permission_level: |kwarg_permission_level| + """ + self._api.patch( + f"{self._url}/collaborators/{collaborator_id}", + json={"permissionLevel": permission_level}, + ) + + def remove(self, collaborator_id: str) -> None: + """ + Remove a user or group as a collaborator. + + Args: + collaborator_id: The user or group ID. + """ + self._api.delete(f"{self._url}/collaborators/{collaborator_id}") + + +class Bases(AirtableModel): + """ + The list of bases visible to the API token. + + See https://airtable.com/developers/web/api/list-bases + """ + + bases: List["Bases.Info"] = _FL() + + def base(self, base_id: str) -> "Bases.Info": + """ + Get basic information about the base with the given ID. + """ + return _find(self.bases, base_id) + + class Info(AirtableModel): + id: str + name: str + permission_level: str + + +class BaseCollaborators(_Collaborators, url="meta/bases/{base.id}"): + """ + Detailed information about who can access a base. 
+ + See https://airtable.com/developers/web/api/get-base-collaborators + """ + + id: str + name: str + created_time: datetime + permission_level: str + workspace_id: str + interfaces: Dict[str, "BaseCollaborators.InterfaceCollaborators"] = _FD() + group_collaborators: "BaseCollaborators.GroupCollaborators" = _F("BaseCollaborators.GroupCollaborators") # fmt: skip + individual_collaborators: "BaseCollaborators.IndividualCollaborators" = _F("BaseCollaborators.IndividualCollaborators") # fmt: skip + invite_links: "BaseCollaborators.InviteLinks" = _F("BaseCollaborators.InviteLinks") # fmt: skip + sensitivity_label: Optional["BaseCollaborators.SensitivityLabel"] = None + + class InterfaceCollaborators( + _Collaborators, + url="meta/bases/{base.id}/interfaces/{key}", + ): + id: str + name: str + created_time: datetime + first_publish_time: Optional[datetime] = None + group_collaborators: List["GroupCollaborator"] = _FL() + individual_collaborators: List["IndividualCollaborator"] = _FL() + invite_links: List["InterfaceInviteLink"] = _FL() + + class GroupCollaborators(AirtableModel): + via_base: List["GroupCollaborator"] = _FL(alias="baseCollaborators") + via_workspace: List["GroupCollaborator"] = _FL(alias="workspaceCollaborators") + + class IndividualCollaborators(AirtableModel): + via_base: List["IndividualCollaborator"] = _FL(alias="baseCollaborators") + via_workspace: List["IndividualCollaborator"] = _FL(alias="workspaceCollaborators") # fmt: skip + + class InviteLinks(RestfulModel, url="{base_collaborators._url}/invites"): + via_base: List["InviteLink"] = _FL(alias="baseInviteLinks") + via_workspace: List["WorkspaceInviteLink"] = _FL(alias="workspaceInviteLinks") # fmt: skip + + class SensitivityLabel(AirtableModel): + id: str + description: str + name: str + + +class BaseShares(AirtableModel): + """ + Collection of shared views in a base. 
+ + See https://airtable.com/developers/web/api/list-shares + """ + + shares: List["BaseShares.Info"] + + class Info( + CanUpdateModel, + CanDeleteModel, + url="meta/bases/{base.id}/shares/{self.share_id}", + writable=["state"], + reload_after_save=False, + ): + state: str + created_by_user_id: str + created_time: datetime + share_id: str + type: str + can_be_synced: Optional[bool] = None + is_password_protected: bool + block_installation_id: Optional[str] = None + restricted_to_email_domains: List[str] = _FL() + restricted_to_enterprise_members: bool + view_id: Optional[str] = None + effective_email_domain_allow_list: List[str] = _FL() + + def enable(self) -> None: + """ + Enable the base share. + """ + self.state = "enabled" + self.save() + + def disable(self) -> None: + """ + Disable the base share. + """ + self.state = "disabled" + self.save() + + +class BaseSchema(AirtableModel): + """ + Schema of all tables within the base. + + See https://airtable.com/developers/web/api/get-base-schema + + Usage: + >>> schema = api.base(base_id).schema() + >>> schema.tables + [TableSchema(...), ...] + >>> schema.table("Table Name") + TableSchema( + id='tbl6jG0XedVMNxFQW', + name='Table Name', + primary_field_id='fld0XedVMNxFQW6jG', + description=None, + fields=[...], + views=[...] + ) + """ + + tables: List["TableSchema"] + + def table(self, id_or_name: str) -> "TableSchema": + """ + Get the schema for the table with the given ID or name. + """ + return _find(self.tables, id_or_name) + + +class TableSchema( + CanUpdateModel, + save_null_values=False, + writable=["name", "description", "date_dependency"], + url="meta/bases/{base.id}/tables/{self.id}", +): + """ + Metadata for a table. + + See https://airtable.com/developers/web/api/get-base-schema + + Usage: + >>> schema = base.table("Table Name").schema() + >>> schema.id + 'tbl6clmhESAtaCCwF' + >>> schema.name + 'Table Name' + + >>> schema.fields + [FieldSchema(...), ...] 
+        >>> schema.field("fld6jG0XedVMNxFQW")
+        SingleLineTextFieldSchema(
+            id='fld6jG0XedVMNxFQW',
+            name='Name',
+            type='singleLineText'
+        )
+
+        >>> schema.views
+        [ViewSchema(...), ...]
+        >>> schema.view("View Name")
+        ViewSchema(
+            id='viw6jG0XedVMNxFQW',
+            name='My Grid View',
+            type='grid'
+        )
+    """
+
+    id: str
+    name: str
+    primary_field_id: str
+    description: Optional[str] = None
+    fields: List["FieldSchema"]
+    views: List["ViewSchema"]
+    date_dependency: Optional["DateDependency"] = pydantic.Field(
+        alias="dateDependencySettings", default=None
+    )
+
+    def field(self, id_or_name: FieldSpecifier) -> "FieldSchema":
+        """
+        Get the schema for the field with the given ID or name.
+        """
+        from pyairtable import orm
+
+        if isinstance(id_or_name, orm.fields.Field):
+            id_or_name = id_or_name.field_name
+        return _find(self.fields, id_or_name)
+
+    def view(self, id_or_name: str) -> "ViewSchema":
+        """
+        Get the schema for the view with the given ID or name.
+        """
+        return _find(self.views, id_or_name)
+
+    def set_date_dependency(
+        self,
+        start_date_field: FieldSpecifier,
+        end_date_field: FieldSpecifier,
+        duration_field: FieldSpecifier,
+        rescheduling_mode: str,
+        predecessor_field: Optional[FieldSpecifier] = None,
+        skip_weekends_and_holidays: bool = False,
+        holidays: Optional[List[str]] = None,
+    ) -> None:
+        """
+        Create or replace the `date dependency settings `__
+        for the table. You still need to call :meth:`~TableSchema.save` to persist the changes.
+
+        Usage:
+            >>> table_schema = base.table("Table Name").schema()
+            >>> table_schema.set_date_dependency(
+            ...     start_date_field="Start Date",
+            ...     end_date_field="End Date",
+            ...     duration_field="Duration",
+            ...     rescheduling_mode="flexible",
+            ...     skip_weekends_and_holidays=True,
+            ...     holidays=["2026-01-01", "2026-12-25"],
+            ...     predecessor_field="Depends On",
+            ... 
) + >>> table_schema.save() + + This method also accepts ORM model fields as shorthand for those fields' IDs: + + >>> table_schema = SomeModel.meta.table.schema() + >>> table_schema.set_date_dependency( + ... start_date_field=SomeModel.start_date, + ... end_date_field=SomeModel.end_date, + ... duration_field=SomeModel.duration, + ... rescheduling_mode="flexible", + ... ) + >>> table_schema.save() + + Args: + start_date_field: The field ID or name for the start date. + end_date_field: The field ID or name for the end date. + duration_field: The field ID or name for the duration. + rescheduling_mode: Either "flexible", "fixed", or "none". + skip_weekends_and_holidays: Whether to skip weekends and holidays. + holidays: A list of holiday dates in ISO format (YYYY-MM-DD). + predecessor_field: Optional; the field ID or name for predecessor tasks. + """ + duration_field = self.field(duration_field).id + start_date_field = self.field(start_date_field).id + end_date_field = self.field(end_date_field).id + if predecessor_field is not None: + predecessor_field = self.field(predecessor_field).id + + self.date_dependency = TableSchema.DateDependency( + is_enabled=True, + duration_field_id=duration_field, + start_date_field_id=start_date_field, + end_date_field_id=end_date_field, + predecessor_field_id=predecessor_field, + rescheduling_mode=rescheduling_mode, + should_skip_weekends_and_holidays=skip_weekends_and_holidays, + holidays=holidays or [], + ) + + class DateDependency(AirtableModel): + """ + Settings for date dependencies in the table. + + See https://airtable.com/developers/web/api/model/date-dependency-settings + """ + + is_enabled: bool + duration_field_id: str + start_date_field_id: str + end_date_field_id: str + predecessor_field_id: Optional[str] = None + rescheduling_mode: str + should_skip_weekends_and_holidays: bool + holidays: List[str] = _FL() + + +class ViewSchema(CanDeleteModel, url="meta/bases/{base.id}/views/{self.id}"): + """ + Metadata for a view. 
+ + See https://airtable.com/developers/web/api/get-view-metadata + + Usage: + >>> vw = table.schema().view("View name") + >>> vw.name + 'View name' + >>> vw.type + 'grid' + >>> vw.delete() + """ + + id: str + type: str + name: str + personal_for_user_id: Optional[str] = None + visible_field_ids: Optional[List[str]] = None + + +class GroupCollaborator(AirtableModel): + created_time: datetime + granted_by_user_id: str + group_id: str + name: str + permission_level: str + + +class IndividualCollaborator(AirtableModel): + created_time: datetime + granted_by_user_id: str + user_id: str + email: str + permission_level: str + + +class BaseIndividualCollaborator(IndividualCollaborator): + base_id: str + + +class BaseGroupCollaborator(GroupCollaborator): + base_id: str + + +# URL generation for an InviteLink assumes that it is nested within +# a RestfulModel class named "InviteLink" that provides URL context. +class InviteLink(CanDeleteModel, url="{invite_links._url}/{self.id}"): + """ + Represents an `invite link `__. + """ + + id: str + type: str + created_time: datetime + invited_email: Optional[str] = None + referred_by_user_id: str + permission_level: str + restricted_to_email_domains: List[str] = _FL() + + +class BaseInviteLink( + InviteLink, + url="meta/bases/{self.base_id}/invites/{self.id}", +): + """ + Represents a `base invite link `__. + """ + + base_id: str + + +class WorkspaceInviteLink( + InviteLink, + url="meta/workspaces/{base_collaborators.workspace_id}/invites/{self.id}", +): + """ + Represents an `invite link `__ + to a workspace that was returned within a base schema. + """ + + +class InterfaceInviteLink( + InviteLink, + url="{interface_collaborators._url}/invites/{self.id}", +): + """ + Represents an `invite link `__ + to an interface that was returned within a base schema. + """ + + +class EnterpriseInfo(AirtableModel): + """ + Information about groups, users, workspaces, and email domains + associated with an enterprise account. 
+ + See https://airtable.com/developers/web/api/get-enterprise + """ + + id: str + created_time: datetime + group_ids: List[str] + user_ids: List[str] + workspace_ids: List[str] + email_domains: List["EnterpriseInfo.EmailDomain"] + root_enterprise_id: str = pydantic.Field(alias="rootEnterpriseAccountId") + descendant_enterprise_ids: List[str] = _FL(alias="descendantEnterpriseAccountIds") + aggregated: Optional["EnterpriseInfo.AggregatedIds"] = None + descendants: Dict[str, "EnterpriseInfo.AggregatedIds"] = _FD() + + class EmailDomain(AirtableModel): + email_domain: str + is_sso_required: bool + + class AggregatedIds(AirtableModel): + group_ids: List[str] = _FL() + user_ids: List[str] = _FL() + workspace_ids: List[str] = _FL() + + +class WorkspaceCollaborators(_Collaborators, url="meta/workspaces/{self.id}"): + """ + Detailed information about who can access a workspace. + + See https://airtable.com/developers/web/api/get-workspace-collaborators + """ + + id: str + name: str + created_time: datetime + base_ids: List[str] + restrictions: "WorkspaceCollaborators.Restrictions" = pydantic.Field(alias="workspaceRestrictions") # fmt: skip + group_collaborators: "WorkspaceCollaborators.GroupCollaborators" = _F("WorkspaceCollaborators.GroupCollaborators") # fmt: skip + individual_collaborators: "WorkspaceCollaborators.IndividualCollaborators" = _F("WorkspaceCollaborators.IndividualCollaborators") # fmt: skip + invite_links: "WorkspaceCollaborators.InviteLinks" = _F("WorkspaceCollaborators.InviteLinks") # fmt: skip + + class Restrictions( + CanUpdateModel, + url="{workspace_collaborators._url}/updateRestrictions", + save_method="POST", + reload_after_save=False, + ): + invite_creation: str = pydantic.Field(alias="inviteCreationRestriction") + share_creation: str = pydantic.Field(alias="shareCreationRestriction") + + class GroupCollaborators(AirtableModel): + via_base: List["BaseGroupCollaborator"] = _FL(alias="baseCollaborators") + via_workspace: List["GroupCollaborator"] = 
_FL(alias="workspaceCollaborators") + + class IndividualCollaborators(AirtableModel): + via_base: List["BaseIndividualCollaborator"] = _FL(alias="baseCollaborators") + via_workspace: List["IndividualCollaborator"] = _FL( + alias="workspaceCollaborators" + ) + + class InviteLinks(RestfulModel, url="{workspace_collaborators._url}/invites"): + via_base: List["BaseInviteLink"] = _FL(alias="baseInviteLinks") + via_workspace: List["InviteLink"] = _FL(alias="workspaceInviteLinks") + + +class NestedId(AirtableModel): + id: str + + +class NestedFieldId(AirtableModel): + field_id: str + + +class Collaborations(AirtableModel): + """ + The full set of collaborations granted to a user or user group. + + See https://airtable.com/developers/web/api/model/collaborations + """ + + base_collaborations: List["Collaborations.BaseCollaboration"] = _FL() + interface_collaborations: List["Collaborations.InterfaceCollaboration"] = _FL() + workspace_collaborations: List["Collaborations.WorkspaceCollaboration"] = _FL() + + def __bool__(self) -> bool: + return bool( + self.base_collaborations + or self.interface_collaborations + or self.workspace_collaborations + ) + + @property + def bases(self) -> Dict[str, "Collaborations.BaseCollaboration"]: + """ + Mapping of base IDs to collaborations, to make lookups easier. + """ + return {c.base_id: c for c in self.base_collaborations} + + @property + def interfaces(self) -> Dict[str, "Collaborations.InterfaceCollaboration"]: + """ + Mapping of interface IDs to collaborations, to make lookups easier. + """ + return {c.interface_id: c for c in self.interface_collaborations} + + @property + def workspaces(self) -> Dict[str, "Collaborations.WorkspaceCollaboration"]: + """ + Mapping of workspace IDs to collaborations, to make lookups easier. 
+ """ + return {c.workspace_id: c for c in self.workspace_collaborations} + + class BaseCollaboration(AirtableModel): + base_id: str + created_time: datetime + granted_by_user_id: str + permission_level: str + + class InterfaceCollaboration(BaseCollaboration): + interface_id: str + + class WorkspaceCollaboration(AirtableModel): + workspace_id: str + created_time: datetime + granted_by_user_id: str + permission_level: str + + +class UserInfo( + CanUpdateModel, + CanDeleteModel, + url="{enterprise.urls.users}/{self.id}", + writable=["state", "email", "first_name", "last_name"], +): + """ + Detailed information about a user. + + See https://airtable.com/developers/web/api/get-user-by-id + """ + + id: str + name: str + email: str + state: str + is_service_account: bool + is_sso_required: bool + is_two_factor_auth_enabled: bool + last_activity_time: Optional[datetime] = None + created_time: Optional[datetime] = None + license_type: Optional[str] = None + enterprise_user_type: Optional[str] = None + invited_to_airtable_by_user_id: Optional[str] = None + is_managed: bool = False + is_admin: bool = False + is_super_admin: bool = False + groups: List[NestedId] = _FL() + collaborations: "Collaborations" = _F("Collaborations") + descendants: Dict[str, "UserInfo.DescendantIds"] = _FD() + aggregated: Optional["UserInfo.AggregatedIds"] = None + + def logout(self) -> None: + self._api.post(self._url + "/logout") + + class DescendantIds(AirtableModel): + license_type: Optional[str] = None + last_activity_time: Optional[datetime] = None + collaborations: Optional["Collaborations"] = None + is_admin: bool = False + is_managed: bool = False + groups: List[NestedId] = _FL() + + class AggregatedIds(AirtableModel): + license_type: Optional[str] = None + last_activity_time: Optional[datetime] = None + collaborations: Optional["Collaborations"] = None + is_admin: bool = False + groups: List[NestedId] = _FL() + + +class UserGroup(AirtableModel): + """ + Detailed information about a user 
group and its members. + + See https://airtable.com/developers/web/api/get-user-group + """ + + id: str + name: str + enterprise_account_id: str + created_time: datetime + updated_time: datetime + members: List["UserGroup.Member"] + collaborations: "Collaborations" = _F("Collaborations") + mapped_user_license_type: Optional[str] = None + + class Member(AirtableModel): + user_id: str + email: str + first_name: str + last_name: str + role: str + created_time: datetime + + +# The data model is a bit confusing here, but it's designed for maximum reuse. +# SomethingFieldConfig contains the `type` and `options` values for each field type. +# _FieldSchemaBase contains the `id`, `name`, and `description` values. +# SomethingFieldSchema inherits from _FieldSchemaBase and SomethingFieldConfig. +# FieldConfig is a union of all available *FieldConfig classes. +# FieldSchema is a union of all available *FieldSchema classes. + + +class AITextFieldConfig(AirtableModel): + """ + Field configuration for `AI text `__. + """ + + type: Literal[FieldType.AI_TEXT] + options: "AITextFieldOptions" + + +class AITextFieldOptions(AirtableModel): + prompt: List[Union[str, "AITextFieldOptions.PromptField"]] = _FL() + referenced_field_ids: List[str] = _FL() + + class PromptField(AirtableModel): + field: NestedFieldId + + +class AutoNumberFieldConfig(AirtableModel): + """ + Field configuration for `Auto number `__. + """ + + type: Literal[FieldType.AUTO_NUMBER] + + +class BarcodeFieldConfig(AirtableModel): + """ + Field configuration for `Barcode `__. + """ + + type: Literal[FieldType.BARCODE] + + +class ButtonFieldConfig(AirtableModel): + """ + Field configuration for `Button `__. + """ + + type: Literal[FieldType.BUTTON] + + +class CheckboxFieldConfig(AirtableModel): + """ + Field configuration for `Checkbox `__. 
+ """ + + type: Literal[FieldType.CHECKBOX] + options: "CheckboxFieldOptions" + + +class CheckboxFieldOptions(AirtableModel): + color: str + icon: str + + +class CountFieldConfig(AirtableModel): + """ + Field configuration for `Count `__. + """ + + type: Literal[FieldType.COUNT] + options: "CountFieldOptions" + + +class CountFieldOptions(AirtableModel): + is_valid: bool + record_link_field_id: Optional[str] = None + + +class CreatedByFieldConfig(AirtableModel): + """ + Field configuration for `Created by `__. + """ + + type: Literal[FieldType.CREATED_BY] + + +class CreatedTimeFieldConfig(AirtableModel): + """ + Field configuration for `Created time `__. + """ + + type: Literal[FieldType.CREATED_TIME] + + +class CurrencyFieldConfig(AirtableModel): + """ + Field configuration for `Currency `__. + """ + + type: Literal[FieldType.CURRENCY] + options: "CurrencyFieldOptions" + + +class CurrencyFieldOptions(AirtableModel): + precision: int + symbol: str + + +class DateFieldConfig(AirtableModel): + """ + Field configuration for `Date `__. + """ + + type: Literal[FieldType.DATE] + options: "DateFieldOptions" + + +class DateFieldOptions(AirtableModel): + date_format: "DateTimeFieldOptions.DateFormat" + + +class DateTimeFieldConfig(AirtableModel): + """ + Field configuration for `Date and time `__. + """ + + type: Literal[FieldType.DATE_TIME] + options: "DateTimeFieldOptions" + + +class DateTimeFieldOptions(AirtableModel): + time_zone: str + date_format: "DateTimeFieldOptions.DateFormat" + time_format: "DateTimeFieldOptions.TimeFormat" + + class DateFormat(AirtableModel): + format: str + name: str + + class TimeFormat(AirtableModel): + format: str + name: str + + +class DurationFieldConfig(AirtableModel): + """ + Field configuration for `Duration `__. 
+ """ + + type: Literal[FieldType.DURATION] + options: "DurationFieldOptions" + + +class DurationFieldOptions(AirtableModel): + duration_format: str + + +class EmailFieldConfig(AirtableModel): + """ + Field configuration for `Email `__. + """ + + type: Literal[FieldType.EMAIL] + + +class ExternalSyncSourceFieldConfig(AirtableModel): + """ + Field configuration for `Sync source `__. + """ + + type: Literal[FieldType.EXTERNAL_SYNC_SOURCE] + options: "SingleSelectFieldOptions" + + +class FormulaFieldConfig(AirtableModel): + """ + Field configuration for `Formula `__. + """ + + type: Literal[FieldType.FORMULA] + options: "FormulaFieldOptions" + + +class FormulaFieldOptions(AirtableModel): + formula: str + is_valid: bool + referenced_field_ids: Optional[List[str]] = None + result: Optional["FieldConfig"] = None + + +class LastModifiedByFieldConfig(AirtableModel): + """ + Field configuration for `Last modified by `__. + """ + + type: Literal[FieldType.LAST_MODIFIED_BY] + + +class LastModifiedTimeFieldConfig(AirtableModel): + """ + Field configuration for `Last modified time `__. + """ + + type: Literal[FieldType.LAST_MODIFIED_TIME] + options: "LastModifiedTimeFieldOptions" + + +class LastModifiedTimeFieldOptions(AirtableModel): + is_valid: bool + referenced_field_ids: Optional[List[str]] = None + result: Optional[Union["DateFieldConfig", "DateTimeFieldConfig"]] = None + + +class ManualSortFieldConfig(AirtableModel): + """ + Field configuration for ``manualSort`` field type (not documented). + """ + + type: Literal[FieldType.MANUAL_SORT] + + +class MultilineTextFieldConfig(AirtableModel): + """ + Field configuration for `Long text `__. + """ + + type: Literal[FieldType.MULTILINE_TEXT] + + +class MultipleAttachmentsFieldConfig(AirtableModel): + """ + Field configuration for `Attachments `__. 
+ """ + + type: Literal[FieldType.MULTIPLE_ATTACHMENTS] + options: "MultipleAttachmentsFieldOptions" + + +class MultipleAttachmentsFieldOptions(AirtableModel): + """ + Field configuration for `Attachments `__. + """ + + is_reversed: bool + + +class MultipleCollaboratorsFieldConfig(AirtableModel): + """ + Field configuration for `Multiple Collaborators `__. + """ + + type: Literal[FieldType.MULTIPLE_COLLABORATORS] + + +class MultipleLookupValuesFieldConfig(AirtableModel): + """ + Field configuration for `Lookup __`. + """ + + type: Literal[FieldType.MULTIPLE_LOOKUP_VALUES] + options: "MultipleLookupValuesFieldOptions" + + +class MultipleLookupValuesFieldOptions(AirtableModel): + is_valid: bool + field_id_in_linked_table: Optional[str] = None + record_link_field_id: Optional[str] = None + result: Optional["FieldConfig"] = None + + +class MultipleRecordLinksFieldConfig(AirtableModel): + """ + Field configuration for `Link to another record __`. + """ + + type: Literal[FieldType.MULTIPLE_RECORD_LINKS] + options: "MultipleRecordLinksFieldOptions" + + +class MultipleRecordLinksFieldOptions(AirtableModel): + is_reversed: bool + linked_table_id: str + prefers_single_record_link: bool + inverse_link_field_id: Optional[str] = None + view_id_for_record_selection: Optional[str] = None + + +class MultipleSelectsFieldConfig(AirtableModel): + """ + Field configuration for `Multiple select `__. + """ + + type: Literal[FieldType.MULTIPLE_SELECTS] + options: "SingleSelectFieldOptions" + + +class NumberFieldConfig(AirtableModel): + """ + Field configuration for `Number `__. + """ + + type: Literal[FieldType.NUMBER] + options: "NumberFieldOptions" + + +class NumberFieldOptions(AirtableModel): + precision: int + + +class PercentFieldConfig(AirtableModel): + """ + Field configuration for `Percent `__. + """ + + type: Literal[FieldType.PERCENT] + options: "NumberFieldOptions" + + +class PhoneNumberFieldConfig(AirtableModel): + """ + Field configuration for `Phone `__. 
+ """ + + type: Literal[FieldType.PHONE_NUMBER] + + +class RatingFieldConfig(AirtableModel): + """ + Field configuration for `Rating `__. + """ + + type: Literal[FieldType.RATING] + options: "RatingFieldOptions" + + +class RatingFieldOptions(AirtableModel): + color: str + icon: str + max: int + + +class RichTextFieldConfig(AirtableModel): + """ + Field configuration for `Rich text `__. + """ + + type: Literal[FieldType.RICH_TEXT] + + +class RollupFieldConfig(AirtableModel): + """ + Field configuration for `Rollup __`. + """ + + type: Literal[FieldType.ROLLUP] + options: "RollupFieldOptions" + + +class RollupFieldOptions(AirtableModel): + field_id_in_linked_table: Optional[str] = None + is_valid: bool + record_link_field_id: Optional[str] = None + referenced_field_ids: Optional[List[str]] = None + result: Optional["FieldConfig"] = None + + +class SingleCollaboratorFieldConfig(AirtableModel): + """ + Field configuration for `Collaborator `__. + """ + + type: Literal[FieldType.SINGLE_COLLABORATOR] + + +class SingleLineTextFieldConfig(AirtableModel): + """ + Field configuration for `Single line text `__. + """ + + type: Literal[FieldType.SINGLE_LINE_TEXT] + + +class SingleSelectFieldConfig(AirtableModel): + """ + Field configuration for `Single select `__. + """ + + type: Literal[FieldType.SINGLE_SELECT] + options: "SingleSelectFieldOptions" + + +class SingleSelectFieldOptions(AirtableModel): + choices: List["SingleSelectFieldOptions.Choice"] + + class Choice(AirtableModel): + id: str + name: str + color: Optional[str] = None + + +class UrlFieldConfig(AirtableModel): + """ + Field configuration for `Url `__. + """ + + type: Literal[FieldType.URL] + + +class UnknownFieldConfig(AirtableModel): + """ + Field configuration class used as a fallback for unrecognized types. + This ensures we don't raise pydantic.ValidationError if Airtable adds new types. 
+ """ + + type: str + options: Optional[Dict[str, Any]] = None + + +class _FieldSchemaBase( + CanUpdateModel, + save_null_values=False, + writable=["name", "description"], + url="meta/bases/{base.id}/tables/{table_schema.id}/fields/{self.id}", +): + id: str + name: str + description: Optional[str] = None + + +# This section is auto-generated so that FieldSchema and FieldConfig are kept aligned. +# See .pre-commit-config.yaml, or just run `tox -e pre-commit` to refresh it. +# fmt: off +r"""[[[cog]]] + +import re +with open(cog.inFile) as fp: + field_types = re.findall( + r"class (\w+Field)Config\(.*?\):(?:\n \"{3}(.*?)\"{3})?", + fp.read(), + re.MULTILINE + re.DOTALL + ) + +cog.out("\n\n") + +cog.outl("FieldConfig: TypeAlias = Union[") +for fld, _ in field_types: + cog.outl(f" {fld}Config,") +cog.outl("]") +cog.out("\n\n") + +for fld, doc in field_types: + cog.out(f"class {fld}Schema(_FieldSchemaBase, {fld}Config):\n ") + if doc: + doc = doc.replace('ield configuration', 'ield schema') + cog.outl("\"\"\"" + doc + "\"\"\"") + else: + cog.outl("pass") + cog.out("\n\n") + +cog.outl("FieldSchema: TypeAlias = Union[") +for fld, _ in field_types: + cog.outl(f" {fld}Schema,") +cog.outl("]") + +[[[out]]]""" + + +FieldConfig: TypeAlias = Union[ + AITextFieldConfig, + AutoNumberFieldConfig, + BarcodeFieldConfig, + ButtonFieldConfig, + CheckboxFieldConfig, + CountFieldConfig, + CreatedByFieldConfig, + CreatedTimeFieldConfig, + CurrencyFieldConfig, + DateFieldConfig, + DateTimeFieldConfig, + DurationFieldConfig, + EmailFieldConfig, + ExternalSyncSourceFieldConfig, + FormulaFieldConfig, + LastModifiedByFieldConfig, + LastModifiedTimeFieldConfig, + ManualSortFieldConfig, + MultilineTextFieldConfig, + MultipleAttachmentsFieldConfig, + MultipleCollaboratorsFieldConfig, + MultipleLookupValuesFieldConfig, + MultipleRecordLinksFieldConfig, + MultipleSelectsFieldConfig, + NumberFieldConfig, + PercentFieldConfig, + PhoneNumberFieldConfig, + RatingFieldConfig, + RichTextFieldConfig, + 
RollupFieldConfig, + SingleCollaboratorFieldConfig, + SingleLineTextFieldConfig, + SingleSelectFieldConfig, + UrlFieldConfig, + UnknownFieldConfig, +] + + +class AITextFieldSchema(_FieldSchemaBase, AITextFieldConfig): + """ + Field schema for `AI text `__. + """ + + +class AutoNumberFieldSchema(_FieldSchemaBase, AutoNumberFieldConfig): + """ + Field schema for `Auto number `__. + """ + + +class BarcodeFieldSchema(_FieldSchemaBase, BarcodeFieldConfig): + """ + Field schema for `Barcode `__. + """ + + +class ButtonFieldSchema(_FieldSchemaBase, ButtonFieldConfig): + """ + Field schema for `Button `__. + """ + + +class CheckboxFieldSchema(_FieldSchemaBase, CheckboxFieldConfig): + """ + Field schema for `Checkbox `__. + """ + + +class CountFieldSchema(_FieldSchemaBase, CountFieldConfig): + """ + Field schema for `Count `__. + """ + + +class CreatedByFieldSchema(_FieldSchemaBase, CreatedByFieldConfig): + """ + Field schema for `Created by `__. + """ + + +class CreatedTimeFieldSchema(_FieldSchemaBase, CreatedTimeFieldConfig): + """ + Field schema for `Created time `__. + """ + + +class CurrencyFieldSchema(_FieldSchemaBase, CurrencyFieldConfig): + """ + Field schema for `Currency `__. + """ + + +class DateFieldSchema(_FieldSchemaBase, DateFieldConfig): + """ + Field schema for `Date `__. + """ + + +class DateTimeFieldSchema(_FieldSchemaBase, DateTimeFieldConfig): + """ + Field schema for `Date and time `__. + """ + + +class DurationFieldSchema(_FieldSchemaBase, DurationFieldConfig): + """ + Field schema for `Duration `__. + """ + + +class EmailFieldSchema(_FieldSchemaBase, EmailFieldConfig): + """ + Field schema for `Email `__. + """ + + +class ExternalSyncSourceFieldSchema(_FieldSchemaBase, ExternalSyncSourceFieldConfig): + """ + Field schema for `Sync source `__. + """ + + +class FormulaFieldSchema(_FieldSchemaBase, FormulaFieldConfig): + """ + Field schema for `Formula `__. 
+ """ + + +class LastModifiedByFieldSchema(_FieldSchemaBase, LastModifiedByFieldConfig): + """ + Field schema for `Last modified by `__. + """ + + +class LastModifiedTimeFieldSchema(_FieldSchemaBase, LastModifiedTimeFieldConfig): + """ + Field schema for `Last modified time `__. + """ + + +class ManualSortFieldSchema(_FieldSchemaBase, ManualSortFieldConfig): + """ + Field schema for ``manualSort`` field type (not documented). + """ + + +class MultilineTextFieldSchema(_FieldSchemaBase, MultilineTextFieldConfig): + """ + Field schema for `Long text `__. + """ + + +class MultipleAttachmentsFieldSchema(_FieldSchemaBase, MultipleAttachmentsFieldConfig): + """ + Field schema for `Attachments `__. + """ + + +class MultipleCollaboratorsFieldSchema(_FieldSchemaBase, MultipleCollaboratorsFieldConfig): + """ + Field schema for `Multiple Collaborators `__. + """ + + +class MultipleLookupValuesFieldSchema(_FieldSchemaBase, MultipleLookupValuesFieldConfig): + """ + Field schema for `Lookup __`. + """ + + +class MultipleRecordLinksFieldSchema(_FieldSchemaBase, MultipleRecordLinksFieldConfig): + """ + Field schema for `Link to another record __`. + """ + + +class MultipleSelectsFieldSchema(_FieldSchemaBase, MultipleSelectsFieldConfig): + """ + Field schema for `Multiple select `__. + """ + + +class NumberFieldSchema(_FieldSchemaBase, NumberFieldConfig): + """ + Field schema for `Number `__. + """ + + +class PercentFieldSchema(_FieldSchemaBase, PercentFieldConfig): + """ + Field schema for `Percent `__. + """ + + +class PhoneNumberFieldSchema(_FieldSchemaBase, PhoneNumberFieldConfig): + """ + Field schema for `Phone `__. + """ + + +class RatingFieldSchema(_FieldSchemaBase, RatingFieldConfig): + """ + Field schema for `Rating `__. + """ + + +class RichTextFieldSchema(_FieldSchemaBase, RichTextFieldConfig): + """ + Field schema for `Rich text `__. + """ + + +class RollupFieldSchema(_FieldSchemaBase, RollupFieldConfig): + """ + Field schema for `Rollup __`. 
+ """ + + +class SingleCollaboratorFieldSchema(_FieldSchemaBase, SingleCollaboratorFieldConfig): + """ + Field schema for `Collaborator `__. + """ + + +class SingleLineTextFieldSchema(_FieldSchemaBase, SingleLineTextFieldConfig): + """ + Field schema for `Single line text `__. + """ + + +class SingleSelectFieldSchema(_FieldSchemaBase, SingleSelectFieldConfig): + """ + Field schema for `Single select `__. + """ + + +class UrlFieldSchema(_FieldSchemaBase, UrlFieldConfig): + """ + Field schema for `Url `__. + """ + + +class UnknownFieldSchema(_FieldSchemaBase, UnknownFieldConfig): + """ + Field schema class used as a fallback for unrecognized types. + This ensures we don't raise pydantic.ValidationError if Airtable adds new types. + """ + + +FieldSchema: TypeAlias = Union[ + AITextFieldSchema, + AutoNumberFieldSchema, + BarcodeFieldSchema, + ButtonFieldSchema, + CheckboxFieldSchema, + CountFieldSchema, + CreatedByFieldSchema, + CreatedTimeFieldSchema, + CurrencyFieldSchema, + DateFieldSchema, + DateTimeFieldSchema, + DurationFieldSchema, + EmailFieldSchema, + ExternalSyncSourceFieldSchema, + FormulaFieldSchema, + LastModifiedByFieldSchema, + LastModifiedTimeFieldSchema, + ManualSortFieldSchema, + MultilineTextFieldSchema, + MultipleAttachmentsFieldSchema, + MultipleCollaboratorsFieldSchema, + MultipleLookupValuesFieldSchema, + MultipleRecordLinksFieldSchema, + MultipleSelectsFieldSchema, + NumberFieldSchema, + PercentFieldSchema, + PhoneNumberFieldSchema, + RatingFieldSchema, + RichTextFieldSchema, + RollupFieldSchema, + SingleCollaboratorFieldSchema, + SingleLineTextFieldSchema, + SingleSelectFieldSchema, + UrlFieldSchema, + UnknownFieldSchema, +] +# [[[end]]] (sum: yhWbyMdrHR) +# fmt: on + + +# Shortcut to allow parsing unions, which is not possible otherwise in Pydantic v1. 
+# See https://github.com/pydantic/pydantic/discussions/4950 +class _HasFieldSchema(AirtableModel): + field_schema: FieldSchema + + +def parse_field_schema(obj: Dict[str, Any]) -> FieldSchema: + """ + Given a ``dict`` representing a field schema, + parse it into the appropriate FieldSchema subclass. + """ + return _HasFieldSchema.model_validate({"field_schema": obj}).field_schema + + +rebuild_models(vars()) diff --git a/pyairtable/models/webhook.py b/pyairtable/models/webhook.py index 61a04e99..b1e0734c 100644 --- a/pyairtable/models/webhook.py +++ b/pyairtable/models/webhook.py @@ -1,21 +1,21 @@ import base64 +from datetime import datetime from functools import partial from hmac import HMAC -from typing import Any, Callable, Dict, Iterator, List, Optional, Union +from typing import Any, Callable, Dict, Iterator, List, Literal, Optional, Union +import pydantic from typing_extensions import Self as SelfType -from pyairtable._compat import pydantic from pyairtable.api.types import RecordId - -from ._base import AirtableModel, SerializableModel, update_forward_refs +from pyairtable.models._base import AirtableModel, CanDeleteModel, rebuild_models # Shortcuts to avoid lots of line wrapping FD: Callable[[], Any] = partial(pydantic.Field, default_factory=dict) FL: Callable[[], Any] = partial(pydantic.Field, default_factory=list) -class Webhook(SerializableModel, allow_update=False): +class Webhook(CanDeleteModel, url="bases/{base.id}/webhooks/{self.id}"): """ A webhook that has been retrieved from the Airtable API. @@ -30,7 +30,7 @@ class Webhook(SerializableModel, allow_update=False): CreateWebhookResponse( id='ach00000000000001', mac_secret_base64='c3VwZXIgZHVwZXIgc2VjcmV0', - expiration_time='2023-07-01T00:00:00.000Z' + expiration_time=datetime.datetime(...) 
) >>> webhooks = base.webhooks() >>> webhooks[0] @@ -42,7 +42,7 @@ class Webhook(SerializableModel, allow_update=False): last_successful_notification_time=None, notification_url="https://example.com", last_notification_result=None, - expiration_time="2023-07-01T00:00:00.000Z", + expiration_time=datetime.datetime(...), specification: WebhookSpecification(...) ) >>> webhooks[0].disable_notifications() @@ -55,10 +55,10 @@ class Webhook(SerializableModel, allow_update=False): are_notifications_enabled: bool cursor_for_next_payload: int is_hook_enabled: bool - last_successful_notification_time: Optional[str] - notification_url: Optional[str] - last_notification_result: Optional["WebhookNotificationResult"] - expiration_time: Optional[str] + last_successful_notification_time: Optional[datetime] = None + notification_url: Optional[str] = None + last_notification_result: Optional["WebhookNotificationResult"] = None + expiration_time: Optional[datetime] = None specification: "WebhookSpecification" def enable_notifications(self) -> None: @@ -110,7 +110,7 @@ def payloads( >>> iter_payloads = webhook.payloads() >>> next(iter_payloads) WebhookPayload( - timestamp="2022-02-01T21:25:05.663Z", + timestamp=datetime.datetime(...), base_transaction_number=4, payload_format="v0", action_metadata=ActionMetadata( @@ -147,7 +147,7 @@ def payloads( ): payloads = page["payloads"] for index, payload in enumerate(payloads): - payload = WebhookPayload.parse_obj(payload) + payload = WebhookPayload.from_api(payload, self._api, context=self) payload.cursor = cursor + index yield payload count += 1 @@ -172,39 +172,13 @@ class WebhookNotification(AirtableModel): use :meth:`Webhook.payloads ` to retrieve the actual payloads describing the change(s) which triggered the webhook. - You will also need some way to persist the ``cursor`` of the webhook payload, - so that on subsequent calls you do not retrieve the same payloads again. - - Usage: - .. 
code-block:: python - - from flask import Flask, request - from pyairtable import Api - from pyairtable.models import WebhookNotification - - app = Flask(__name__) - - @app.route("/airtable-webhook", methods=["POST"]) - def airtable_webhook(): - body = request.data - header = request.headers["X-Airtable-Content-MAC"] - secret = app.config["AIRTABLE_WEBHOOK_SECRET"] - event = WebhookNotification.from_request(body, header, secret) - airtable = Api(app.config["AIRTABLE_API_KEY"]) - webhook = airtable.base(event.base.id).webhook(event.webhook.id) - cursor = int(your_db.get(f"cursor_{event.webhook}", 0)) + 1 - for payload in webhook.payloads(cursor=cursor): - # ...do stuff... - your_db.set(f"cursor_{event.webhook}", payload.cursor) - return ("", 204) # intentionally empty response - See `Webhook notification delivery `_ for more information on how these payloads are structured. """ base: _NestedId webhook: _NestedId - timestamp: str + timestamp: datetime @classmethod def from_request( @@ -214,7 +188,7 @@ def from_request( secret: Union[bytes, str], ) -> SelfType: """ - Validates a request body and X-Airtable-Content-MAC header + Validate a request body and X-Airtable-Content-MAC header using the secret returned when the webhook was created. 
Args: @@ -233,15 +207,15 @@ def from_request( if isinstance(secret, str): secret = base64.decodebytes(secret.encode("ascii")) hmac = HMAC(secret, body.encode("ascii"), "sha256") - expected = "hmac-sha256-" + hmac.hexdigest() + expected = "hmac-sha256=" + hmac.hexdigest() if header != expected: raise ValueError("X-Airtable-Content-MAC header failed validation") - return cls.parse_raw(body) + return cls.model_validate_json(body) class WebhookNotificationResult(AirtableModel): success: bool - completion_timestamp: str + completion_timestamp: datetime duration_ms: float retry_number: int will_be_retried: Optional[bool] = None @@ -257,31 +231,35 @@ class WebhookSpecification(AirtableModel): class Options(AirtableModel): filters: "WebhookSpecification.Filters" - includes: Optional["WebhookSpecification.Includes"] + includes: Optional["WebhookSpecification.Includes"] = None class Filters(AirtableModel): data_types: List[str] - record_change_scope: Optional[str] + record_change_scope: Optional[str] = None change_types: List[str] = FL() from_sources: List[str] = FL() - source_options: Optional["WebhookSpecification.SourceOptions"] + source_options: Optional["WebhookSpecification.SourceOptions"] = None watch_data_in_field_ids: List[str] = FL() watch_schemas_of_field_ids: List[str] = FL() class SourceOptions(AirtableModel): - form_submission: Optional["WebhookSpecification.FormSubmission"] + form_submission: Optional["FormSubmission"] = None + form_page_submission: Optional["FormPageSubmission"] = None + + class FormSubmission(AirtableModel): + view_id: str - class FormSubmission(AirtableModel): - view_id: str + class FormPageSubmission(AirtableModel): + page_id: str class Includes(AirtableModel): - include_cell_values_in_field_ids: List[str] = FL() + include_cell_values_in_field_ids: Union[None, List[str], Literal["all"]] = None include_previous_cell_values: bool = False include_previous_field_definitions: bool = False class CreateWebhook(AirtableModel): - notification_url: 
Optional[str] + notification_url: Optional[str] = None specification: WebhookSpecification @@ -300,7 +278,7 @@ class CreateWebhookResponse(AirtableModel): mac_secret_base64: str #: The timestamp when the webhook will expire and be deleted. - expiration_time: Optional[str] + expiration_time: Optional[datetime] = None class WebhookPayload(AirtableModel): @@ -309,19 +287,23 @@ class WebhookPayload(AirtableModel): `Webhooks payload `_. """ - timestamp: str + timestamp: datetime base_transaction_number: int payload_format: str - action_metadata: Optional["WebhookPayload.ActionMetadata"] + action_metadata: Optional["WebhookPayload.ActionMetadata"] = None changed_tables_by_id: Dict[str, "WebhookPayload.TableChanged"] = FD() created_tables_by_id: Dict[str, "WebhookPayload.TableCreated"] = FD() destroyed_table_ids: List[str] = FL() - error: Optional[bool] - error_code: Optional[str] = pydantic.Field(alias="code") + error: Optional[bool] = None + error_code: Optional[str] = pydantic.Field(alias="code", default=None) - #: This is not a part of Airtable's webhook payload specification. - #: This indicates the cursor field in the response which provided this payload. - cursor: Optional[int] + #: The payload transaction number, as described in + #: `List webhook payloads - Response format `__. + #: If passed to :meth:`Webhook.payloads` it will return the same payload again, + #: along with any more payloads recorded after it. + #: + #: This field is specific to pyAirtable, and is not part of Airtable's webhook payload specification. 
+ cursor: Optional[int] = None class ActionMetadata(AirtableModel): source: str @@ -332,12 +314,12 @@ class TableInfo(AirtableModel): description: Optional[str] = None class FieldInfo(AirtableModel): - name: Optional[str] - type: Optional[str] + name: Optional[str] = None + type: Optional[str] = None class FieldChanged(AirtableModel): current: "WebhookPayload.FieldInfo" - previous: Optional["WebhookPayload.FieldInfo"] + previous: Optional["WebhookPayload.FieldInfo"] = None class TableChanged(AirtableModel): changed_views_by_id: Dict[str, "WebhookPayload.ViewChanged"] = FD() @@ -345,7 +327,7 @@ class TableChanged(AirtableModel): changed_records_by_id: Dict[RecordId, "WebhookPayload.RecordChanged"] = FD() created_fields_by_id: Dict[str, "WebhookPayload.FieldInfo"] = FD() created_records_by_id: Dict[RecordId, "WebhookPayload.RecordCreated"] = FD() - changed_metadata: Optional["WebhookPayload.TableChanged.ChangedMetadata"] + changed_metadata: Optional["WebhookPayload.TableChanged.ChangedMetadata"] = None destroyed_field_ids: List[str] = FL() destroyed_record_ids: List[RecordId] = FL() @@ -359,20 +341,20 @@ class ViewChanged(AirtableModel): destroyed_record_ids: List[RecordId] = FL() class TableCreated(AirtableModel): - metadata: Optional["WebhookPayload.TableInfo"] + metadata: Optional["WebhookPayload.TableInfo"] = None fields_by_id: Dict[str, "WebhookPayload.FieldInfo"] = FD() records_by_id: Dict[RecordId, "WebhookPayload.RecordCreated"] = FD() class RecordChanged(AirtableModel): current: "WebhookPayload.CellValuesByFieldId" - previous: Optional["WebhookPayload.CellValuesByFieldId"] - unchanged: Optional["WebhookPayload.CellValuesByFieldId"] + previous: Optional["WebhookPayload.CellValuesByFieldId"] = None + unchanged: Optional["WebhookPayload.CellValuesByFieldId"] = None class CellValuesByFieldId(AirtableModel): cell_values_by_field_id: Dict[str, Any] class RecordCreated(AirtableModel): - created_time: str + created_time: datetime cell_values_by_field_id: Dict[str, 
Any] @@ -382,4 +364,4 @@ class WebhookPayloads(AirtableModel): payloads: List[WebhookPayload] -update_forward_refs(vars()) +rebuild_models(vars()) diff --git a/pyairtable/orm/__init__.py b/pyairtable/orm/__init__.py index b85a24fc..f6d86cd7 100644 --- a/pyairtable/orm/__init__.py +++ b/pyairtable/orm/__init__.py @@ -1,7 +1,8 @@ -from . import fields -from .model import Model +from pyairtable.orm import fields +from pyairtable.orm.model import Model, SaveResult __all__ = [ "Model", + "SaveResult", "fields", ] diff --git a/pyairtable/orm/fields.py b/pyairtable/orm/fields.py index 43c6b2f2..2a48e2ca 100644 --- a/pyairtable/orm/fields.py +++ b/pyairtable/orm/fields.py @@ -1,5 +1,5 @@ """ -Field are used to define the Airtable column type for your pyAirtable models. +Fields define how you'll interact with your data when using the :doc:`orm`. Internally these are implemented as `descriptors `_, which allows us to define methods and type annotations for getting and setting attribute values. @@ -24,18 +24,23 @@ } } """ + import abc import importlib +import re from datetime import date, datetime, timedelta from enum import Enum from typing import ( TYPE_CHECKING, Any, + Callable, ClassVar, + Dict, Generic, List, Literal, Optional, + Set, Tuple, Type, TypeVar, @@ -47,34 +52,49 @@ from typing_extensions import Self as SelfType from typing_extensions import TypeAlias -from pyairtable import utils +from pyairtable import formulas, utils from pyairtable.api.types import ( + AITextDict, AttachmentDict, BarcodeDict, ButtonDict, CollaboratorDict, + CollaboratorEmailDict, + CreateAttachmentDict, RecordId, ) +from pyairtable.exceptions import ( + MissingValueError, + MultipleValuesError, + UnsavedRecordError, +) +from pyairtable.models import schema as S +from pyairtable.models.schema import FieldType +from pyairtable.orm.lists import AttachmentsList, ChangeTrackingList if TYPE_CHECKING: - from pyairtable.orm import Model # noqa + from pyairtable.orm import Model _ClassInfo: 
TypeAlias = Union[type, Tuple["_ClassInfo", ...]] T = TypeVar("T") -T_Linked = TypeVar("T_Linked", bound="Model") +T_Linked = TypeVar("T_Linked", bound="Model") # used by LinkField T_API = TypeVar("T_API") # type used to exchange values w/ Airtable API -T_ORM = TypeVar("T_ORM") # type used to store values internally +T_ORM = TypeVar("T_ORM") # type used to represent values internally +T_ORM_List = TypeVar("T_ORM_List") # type used for lists of internal values +T_Missing = TypeVar("T_Missing") # type returned when Airtable has no value -class Field(Generic[T_API, T_ORM], metaclass=abc.ABCMeta): +class Field(Generic[T_API, T_ORM, T_Missing], metaclass=abc.ABCMeta): """ A generic class for an Airtable field descriptor that will be included in an ORM model. - Type-checked subclasses should provide two type parameters, - ``T_API`` and ``T_ORM``, which indicate the type returned - by the API and the type used to store values internally. + Type-checked subclasses should provide three type parameters: + + * ``T_API``, indicating the JSON-serializable type returned by the API + * ``T_ORM``, indicating the type used to store values internally + * ``T_Missing``, indicating the type of value returned if the field is empty Subclasses should also define ``valid_types`` as a type or tuple of types, which will be used to validate the type @@ -84,6 +104,9 @@ class Field(Generic[T_API, T_ORM], metaclass=abc.ABCMeta): #: Types that are allowed to be passed to this field. valid_types: ClassVar[_ClassInfo] = () + #: The value to return when the field is missing + missing_value: ClassVar[Any] = None + #: Whether to allow modification of the value in this field. 
readonly: bool = False @@ -135,11 +158,7 @@ def _description(self) -> str: """ if self._model and self._attribute_name: return f"{self._model.__name__}.{self._attribute_name}" - if self._model: - return f"{self._model.__name__}.{self.field_name}" - if self.field_name: - return f"{self.field_name!r} field" - return "Field" + return f"{self.field_name!r} field" # __get__ and __set__ are called when accessing an instance of Field on an object. # Model.field should return the Field instance itself, whereas @@ -147,48 +166,50 @@ def _description(self) -> str: # Model.field will call __get__(instance=None, owner=Model) @overload - def __get__(self, instance: None, owner: Type[Any]) -> SelfType: - ... + def __get__(self, instance: None, owner: Type[Any]) -> SelfType: ... # obj.field will call __get__(instance=obj, owner=Model) @overload - def __get__(self, instance: "Model", owner: Type[Any]) -> Optional[T_ORM]: - ... + def __get__( + self, instance: "Model", owner: Type[Any] + ) -> Union[T_ORM, T_Missing]: ... 
def __get__( self, instance: Optional["Model"], owner: Type[Any] - ) -> Union[SelfType, Optional[T_ORM]]: + ) -> Union[SelfType, T_ORM, T_Missing]: # allow calling Model.field to get the field object instead of a value if not instance: return self try: - return cast(T_ORM, instance._fields[self.field_name]) + value = instance._fields[self.field_name] except (KeyError, AttributeError): - return self._missing_value() + return cast(T_Missing, self.missing_value) + if value is None: + return cast(T_Missing, self.missing_value) + return cast(T_ORM, value) def __set__(self, instance: "Model", value: Optional[T_ORM]) -> None: self._raise_if_readonly() - if not hasattr(instance, "_fields"): - instance._fields = {} if self.validate_type and value is not None: self.valid_or_raise(value) + if not hasattr(instance, "_fields"): + instance._fields = {} instance._fields[self.field_name] = value + if hasattr(instance, "_changed"): + instance._changed[self.field_name] = True def __delete__(self, instance: "Model") -> None: raise AttributeError(f"cannot delete {self._description}") - def _missing_value(self) -> Optional[T_ORM]: - return None - def to_record_value(self, value: Any) -> Any: """ - Returns the value which should be persisted to the API. + Calculate the value which should be persisted to the API. """ return value def to_internal_value(self, value: Any) -> Any: """ - Converts a value from the API into the value's internal representation. + Convert a value from the API into the value's internal representation. """ return value @@ -216,27 +237,266 @@ def _repr_fields(self) -> List[Tuple[str, Any]]: ("validate_type", self.validate_type), ] + def eq(self, value: Any) -> "formulas.Comparison": + """ + Build an :class:`~pyairtable.formulas.EQ` comparison using this field. + """ + return formulas.EQ(self, value) + + def ne(self, value: Any) -> "formulas.Comparison": + """ + Build an :class:`~pyairtable.formulas.NE` comparison using this field. 
+ """ + return formulas.NE(self, value) + + def gt(self, value: Any) -> "formulas.Comparison": + """ + Build a :class:`~pyairtable.formulas.GT` comparison using this field. + """ + return formulas.GT(self, value) + + def lt(self, value: Any) -> "formulas.Comparison": + """ + Build an :class:`~pyairtable.formulas.LT` comparison using this field. + """ + return formulas.LT(self, value) + + def gte(self, value: Any) -> "formulas.Comparison": + """ + Build a :class:`~pyairtable.formulas.GTE` comparison using this field. + """ + return formulas.GTE(self, value) + + def lte(self, value: Any) -> "formulas.Comparison": + """ + Build an :class:`~pyairtable.formulas.LTE` comparison using this field. + """ + return formulas.LTE(self, value) + + +class _Requires_API_ORM(Generic[T_API, T_ORM], Field[T_API, T_ORM, T_ORM]): + """ + A mix-in for a Field class which indicates two things: + + 1. It should never receive a null value from the Airtable API. + 2. It should never allow other code to set it as None (or the empty string). + + If either of those conditions occur, the field will raise an exception. + """ + + @overload + def __get__(self, instance: None, owner: Type[Any]) -> SelfType: ... + + @overload + def __get__(self, instance: "Model", owner: Type[Any]) -> T_ORM: ... + + def __get__( + self, instance: Optional["Model"], owner: Type[Any] + ) -> Union[SelfType, T_ORM]: + value = super().__get__(instance, owner) + if value is None or value == "": + raise MissingValueError(f"{self._description} received an empty value") + return value + + def __set__(self, instance: "Model", value: Optional[T_ORM]) -> None: + if value in (None, ""): + raise MissingValueError(f"{self._description} does not accept empty values") + super().__set__(instance, value) + + +T_FieldSchema = TypeVar("T_FieldSchema", bound=S.FieldSchema) + -#: A generic Field whose internal and API representations are the same type. 
-_BasicField: TypeAlias = Field[T, T] +class _FieldSchema(Generic[T_FieldSchema]): + """ + A mix-in for a Field class which indicates that its field has a particular schema. + """ + + def field_schema(self) -> T_FieldSchema: + """ + Retrieve the schema for the given field. + """ + if not isinstance(self, Field): + raise RuntimeError("field_schema() must be called on a Field instance") + if not self._model: + raise RuntimeError(f"{self._description} was not defined on a Model") + return cast( + T_FieldSchema, self._model.meta.table.schema().field(self.field_name) + ) + + +#: A generic Field with internal and API representations that are the same type. +_BasicField: TypeAlias = Field[T, T, None] +_BasicFieldWithMissingValue: TypeAlias = Field[T, T, T] +_Requires: TypeAlias = _Requires_API_ORM[T, T] #: An alias for any type of Field. -AnyField: TypeAlias = _BasicField[Any] +AnyField: TypeAlias = Field[Any, Any, Any] + + +# ====================================================== +# Helpers for field class documentation +# ====================================================== + + +T_Field = TypeVar("T_Field", bound=Type[Field[Any, Any, Any]]) + + +def _use_inherited_docstring(cls: T_Field) -> T_Field: + """ + Reuses the class's first parent class's docstring if it's undocumented. + """ + from_cls: Type[Any] = cls + while len(from_cls.__mro__) > 1 and not from_cls.__doc__: + from_cls = from_cls.__mro__[1] + if from_cls is Field: + raise RuntimeError(f"{cls} needs a docstring") # pragma: no cover + cls.__doc__ = from_cls.__doc__ + return cls + + +def _maybe_readonly_docstring(cls: T_Field) -> T_Field: + """ + Modifies the class docstring to indicate read-only vs. writable. + """ + _use_inherited_docstring(cls) + if cls.readonly: + cls.__doc__ = re.sub( + r"Accepts (?:only )?((?:[^.`]+|`[^`]+`)+)\.", + r"Read only. 
Returns \1.", + cls.__doc__ or "", + ) + return cls + + +def _field_api_docstring(*refs_tags: str) -> Callable[[T_Field], T_Field]: + """ + Appends the class docstring with a link to the Airtable API documentation for the field type. + If the class is undocumented, reuses the class's first parent class's docstring. + """ + if len(refs_tags) == 1: + refs_tags = (refs_tags[0], refs_tags[0].lower().replace(" ", "")) + joiner = " and " if len(refs_tags) == 4 else ", " + link_text = "For more about this field type, see %s." % joiner.join( + f"`{ref} `__" + for (ref, tag) in zip(refs_tags[::2], refs_tags[1::2]) + ) + + def _wrapper(cls: T_Field) -> T_Field: + _use_inherited_docstring(cls) + _maybe_readonly_docstring(cls) + utils._append_docstring_text(cls, link_text) + return cls + + return _wrapper + + +def _required_value_docstring(cls: T_Field) -> T_Field: + """ + Appends the class's docstring so that it includes a note about required values. + If the class is undocumented, reuses the class's first parent class's docstring. + """ + _use_inherited_docstring(cls) + _maybe_readonly_docstring(cls) + append = "If the Airtable API returns ``null``, " + if not cls.readonly: + append += "or if a caller sets this field to ``None`` or ``''``, " + append += "this field raises :class:`~pyairtable.exceptions.MissingValueError`." + utils._append_docstring_text(cls, append) + return cls + + +# ====================================================== +# Field types that contain text values +# ====================================================== + + +class _StringField(_BasicFieldWithMissingValue[str]): + """ + Accepts ``str``. + Returns ``""`` instead of ``None`` if the field is empty. 
+ """ + + missing_value = "" + valid_types = str + + +@_field_api_docstring("Email", "emailtext") +class EmailField(_StringField, _FieldSchema[S.EmailFieldSchema]): + pass + + +@_field_api_docstring("Sync source") +class ExternalSyncSourceField( + _StringField, _FieldSchema[S.ExternalSyncSourceFieldSchema] +): + + readonly = True + + +class ManualSortField(_StringField, _FieldSchema[S.ManualSortFieldSchema]): + """ + Read-only. Returns ``""`` instead of ``None`` if the field is empty. + + The ``manualSort`` field type is used to define a manual sort order for a list view. + Its use or behavior via the API is not documented. + """ + + readonly = True + + +@_field_api_docstring("Long text", "multilinetext") +class MultilineTextField(_StringField, _FieldSchema[S.MultilineTextFieldSchema]): + pass + + +@_field_api_docstring("Phone") +class PhoneNumberField(_StringField, _FieldSchema[S.PhoneNumberFieldSchema]): + pass + + +@_field_api_docstring("Rich text") +class RichTextField(_StringField, _FieldSchema[S.RichTextFieldSchema]): + pass -class TextField(_BasicField[str]): +@_field_api_docstring("Single select", "select") +class SelectField(Field[str, str, None], _FieldSchema[S.SingleSelectFieldSchema]): """ - Used for all Airtable text fields. Accepts ``str``. + Represents a single select dropdown field. Accepts ``str`` or ``None``. - See `Single line text `__ - and `Long text `__. + This will return ``None`` if no value is set, and will only return ``""`` + if an empty dropdown option is available and selected. 
""" valid_types = str -class _NumericField(Generic[T], _BasicField[T]): +@_field_api_docstring("Single line text", "simpletext") +class SingleLineTextField(_StringField, _FieldSchema[S.SingleLineTextFieldSchema]): + pass + + +@_field_api_docstring("Single line text", "simpletext", "Long text", "multilinetext") +class TextField( + _StringField, + _FieldSchema[Union[S.SingleLineTextFieldSchema, S.MultilineTextFieldSchema]], +): + pass + + +@_field_api_docstring("Url", "urltext") +class UrlField(_StringField, _FieldSchema[S.UrlFieldSchema]): + pass + + +# ====================================================== +# Field types that contain numeric values +# ====================================================== + + +class _NumericFieldBase(_BasicField[T]): """ Base class for Number, Float, and Integer. Shares a common validation rule. """ @@ -247,198 +507,256 @@ def valid_or_raise(self, value: Any) -> None: raise TypeError( f"{self.__class__.__name__} value must be {self.valid_types}; got {type(value)}" ) - return super().valid_or_raise(value) + super().valid_or_raise(value) -class NumberField(_NumericField[Union[int, float]]): +class _NumberField(_NumericFieldBase[Union[int, float]]): """ Number field with unspecified precision. Accepts either ``int`` or ``float``. - - See `Number `__. """ valid_types = (int, float) -# This cannot inherit from NumberField because valid_types would be more restrictive -# in the subclass than what is defined in the parent class. -class IntegerField(_NumericField[int]): +class _IntegerField(_NumericFieldBase[int]): """ Number field with integer precision. Accepts only ``int`` values. - - See `Number `__. """ valid_types = int -# This cannot inherit from NumberField because valid_types would be more restrictive -# in the subclass than what is defined in the parent class. -class FloatField(_NumericField[float]): +class _FloatField(_NumericFieldBase[float]): """ Number field with decimal precision. Accepts only ``float`` values. 
- - See `Number `__. """ valid_types = float -class RatingField(IntegerField): - """ - Accepts ``int`` values that are greater than zero. +@_field_api_docstring("Number", "decimalorintegernumber") +class NumberField(_NumberField, _FieldSchema[S.NumberFieldSchema]): + pass - See `Rating `__. - """ - def valid_or_raise(self, value: int) -> None: - super().valid_or_raise(value) - if value < 1: - raise ValueError("rating cannot be below 1") +# This cannot inherit from NumberField because valid_types would be more restrictive +# in the subclass than what is defined in the parent class. +@_field_api_docstring("Number", "decimalorintegernumber") +class IntegerField(_IntegerField, _FieldSchema[S.NumberFieldSchema]): + pass -class CheckboxField(_BasicField[bool]): - """ - Returns ``False`` instead of ``None`` if the field is empty on the Airtable base. +# This cannot inherit from NumberField because valid_types would be more restrictive +# in the subclass than what is defined in the parent class. +@_field_api_docstring("Number", "decimalorintegernumber") +class FloatField(_FloatField, _FieldSchema[S.NumberFieldSchema]): + pass + - See `Checkbox `__. +@_field_api_docstring("Checkbox") +class CheckboxField(Field[bool, bool, bool], _FieldSchema[S.CheckboxFieldSchema]): + """ + Accepts ``bool``. + Returns ``False`` instead of ``None`` if the field is empty. 
""" + missing_value = False valid_types = bool - def _missing_value(self) -> bool: - return False +@_field_api_docstring("Count", "count") +class CountField(_IntegerField, _FieldSchema[S.CountFieldSchema]): + readonly = True -class DatetimeField(Field[str, datetime]): + +@_field_api_docstring("Currency", "currencynumber") +class CurrencyField(_NumberField, _FieldSchema[S.CurrencyFieldSchema]): + pass + + +@_field_api_docstring("Percent", "percentnumber") +class PercentField(_NumberField, _FieldSchema[S.PercentFieldSchema]): + pass + + +@_field_api_docstring("Rating") +class RatingField(_IntegerField, _FieldSchema[S.RatingFieldSchema]): + """ + Accepts ``int`` values that are greater than zero. """ - DateTime field. Accepts only `datetime `_ values. - See `Date and time `__. + def valid_or_raise(self, value: int) -> None: + super().valid_or_raise(value) + if value < 1: + raise ValueError("rating cannot be below 1") + + +# ====================================================== +# Field types that contain dates or datetimes +# ====================================================== + + +class _DatetimeField(Field[str, datetime, None]): + """ + Accepts only `datetime `_ values. """ valid_types = datetime def to_record_value(self, value: datetime) -> str: """ - Converts a ``datetime`` into an ISO 8601 string, e.g. "2014-09-05T12:34:56.000Z". + Convert a ``datetime`` into an ISO 8601 string, e.g. "2014-09-05T12:34:56.000Z". """ return utils.datetime_to_iso_str(value) def to_internal_value(self, value: str) -> datetime: """ - Converts an ISO 8601 string, e.g. "2014-09-05T07:00:00.000Z" into a ``datetime``. + Convert an ISO 8601 string, e.g. "2014-09-05T07:00:00.000Z" into a ``datetime``. """ return utils.datetime_from_iso_str(value) -class DateField(Field[str, date]): - """ - Date field. Accepts only `date `_ values. +@_field_api_docstring("Date and time") +class DatetimeField(_DatetimeField, _FieldSchema[S.DateTimeFieldSchema]): + pass + - See `Date `__. 
+@_field_api_docstring("Date", "dateonly") +class DateField(Field[str, date, None], _FieldSchema[S.DateFieldSchema]): + """ + Accepts only `date `_ values. """ valid_types = date def to_record_value(self, value: date) -> str: """ - Converts a ``date`` into an ISO 8601 string, e.g. "2014-09-05". + Convert a ``date`` into an ISO 8601 string, e.g. "2014-09-05". """ return utils.date_to_iso_str(value) def to_internal_value(self, value: str) -> date: """ - Converts an ISO 8601 string, e.g. "2014-09-05" into a ``date``. + Convert an ISO 8601 string, e.g. "2014-09-05" into a ``date``. """ return utils.date_from_iso_str(value) -class DurationField(Field[int, timedelta]): +@_field_api_docstring("Duration", "durationnumber") +class DurationField(Field[int, timedelta, None], _FieldSchema[S.DurationFieldSchema]): """ Duration field. Accepts only `timedelta `_ values. - - See `Duration `__. - Airtable's API returns this as a number of seconds. """ valid_types = timedelta def to_record_value(self, value: timedelta) -> float: """ - Converts a ``timedelta`` into a number of seconds. + Convert a ``timedelta`` into a number of seconds. """ return value.total_seconds() def to_internal_value(self, value: Union[int, float]) -> timedelta: """ - Converts a number of seconds into a ``timedelta``. + Convert a number of seconds into a ``timedelta``. """ return timedelta(seconds=value) -class _DictField(Generic[T], _BasicField[T]): - """ - Generic field type that stores a single dict. Not for use via API; - should be subclassed by concrete field types (below). 
- """ - - valid_types = dict +# ====================================================== +# Field types that contain complex values (dicts, lists) +# ====================================================== -class _ListField(Generic[T_API, T_ORM], Field[List[T_API], List[T_ORM]]): +class _ListFieldBase( + Generic[T_API, T_ORM, T_ORM_List], + Field[List[T_API], List[T_ORM], T_ORM_List], +): """ - Generic type for a field that stores a list of values. Can be used - to refer to a lookup field that might return more than one value. + Generic type for a field that stores a list of values. Not for direct use; should be subclassed by concrete field types (below). + + Generic type parameters: + * ``T_API``: The type of value returned by the Airtable API. + * ``T_ORM``: The type of value stored internally. + * ``T_ORM_List``: The type of list object that will be returned. """ valid_types = list + list_class: Type[T_ORM_List] + contains_type: Optional[Type[T_ORM]] # List fields will always return a list, never ``None``, so we # have to overload the type annotations for __get__ + def __init_subclass__(cls, **kwargs: Any) -> None: + cls.contains_type = kwargs.pop("contains_type", None) + cls.list_class = kwargs.pop("list_class", ChangeTrackingList) + + if cls.contains_type and not isinstance(cls.contains_type, type): + raise TypeError(f"contains_type= expected a type, got {cls.contains_type}") + if not isinstance(cls.list_class, type): + raise TypeError(f"list_class= expected a type, got {cls.list_class}") + if not issubclass(cls.list_class, ChangeTrackingList): + raise TypeError( + f"list_class= expected Type[ChangeTrackingList], got {cls.list_class}" + ) + + return super().__init_subclass__(**kwargs) + @overload - def __get__(self, instance: None, owner: Type[Any]) -> SelfType: - ... + def __get__(self, instance: None, owner: Type[Any]) -> SelfType: ... @overload - def __get__(self, instance: "Model", owner: Type[Any]) -> List[T_ORM]: - ... 
+ def __get__(self, instance: "Model", owner: Type[Any]) -> T_ORM_List: ... def __get__( self, instance: Optional["Model"], owner: Type[Any] - ) -> Union[SelfType, List[T_ORM]]: + ) -> Union[SelfType, T_ORM_List]: if not instance: return self return self._get_list_value(instance) - def _get_list_value(self, instance: "Model") -> List[T_ORM]: - value = cast(List[T_ORM], instance._fields.get(self.field_name)) - # If Airtable returns no value, substitute an empty list. - if value is None: - value = [] - # For implementers to be able to modify this list in place - # and persist it later when they call .save(), we need to - # set this empty list as the field's value. - if not self.readonly: - instance._fields[self.field_name] = value - return value + def __set__(self, instance: "Model", value: Optional[List[T_ORM]]) -> None: + if isinstance(value, list) and not isinstance(value, self.list_class): + assert isinstance(self.list_class, type) + assert issubclass(self.list_class, ChangeTrackingList) + value = self.list_class(value, field=self, model=instance) + super().__set__(instance, value) - def to_internal_value(self, value: Optional[List[T_ORM]]) -> List[T_ORM]: + def _get_list_value(self, instance: "Model") -> T_ORM_List: + value = instance._fields.get(self.field_name) + # If Airtable returns no value, substitute an empty list. if value is None: value = [] - return value - -class _ValidatingListField(Generic[T], _ListField[T, T]): - contains_type: Type[T] + # We need to keep track of any mutations to this list, so we know + # whether to write the field back to the API when the model is saved. + if not isinstance(value, self.list_class): + # These were already checked in __init_subclass__ but mypy doesn't know that. 
+ assert isinstance(self.list_class, type) + assert issubclass(self.list_class, ChangeTrackingList) + value = self.list_class(value, field=self, model=instance) + + # For implementers to be able to modify this list in place + # and persist it later when they call .save(), we need to + # set the list as the field's value. + instance._fields[self.field_name] = value + return cast(T_ORM_List, value) # type: ignore[redundant-cast] def valid_or_raise(self, value: Any) -> None: super().valid_or_raise(value) - for obj in value: - if not isinstance(obj, self.contains_type): - raise TypeError(f"expected {self.contains_type}; got {type(obj)}") + if self.contains_type: + for obj in value: + if not isinstance(obj, self.contains_type): + raise TypeError(f"expected {self.contains_type}; got {type(obj)}") + + +class _ListField(_ListFieldBase[T, T, ChangeTrackingList[T]]): + """ + Generic type for a field that stores a list of values. + Not for direct use; should be subclassed by concrete field types (below). + """ class _LinkFieldOptions(Enum): @@ -449,17 +767,25 @@ class _LinkFieldOptions(Enum): LinkSelf = _LinkFieldOptions.LinkSelf -class LinkField(_ListField[RecordId, T_Linked]): +@_field_api_docstring("Link to another record", "foreignkey") +class LinkField( + Generic[T_Linked], + _ListFieldBase[ + RecordId, + T_Linked, + ChangeTrackingList[T_Linked], + ], + _FieldSchema[S.MultipleRecordLinksFieldSchema], +): """ Represents a MultipleRecordLinks field. Returns and accepts lists of Models. Can also be used with a lookup field that pulls from a MultipleRecordLinks field, provided the field is created with ``readonly=True``. - - See `Link to another record `__. """ _linked_model: Union[str, Literal[_LinkFieldOptions.LinkSelf], Type[T_Linked]] + _max_retrieve: Optional[int] = None def __init__( self, @@ -488,10 +814,10 @@ def __init__( readonly: If ``True``, any attempt to write a value to this field will raise an ``AttributeError``. 
This will not, however, prevent any modification of the list object returned by this field. - lazy: If ``True``, this field will return empty objects with oly IDs; + lazy: If ``True``, this field will return empty objects with only IDs; call :meth:`~pyairtable.orm.Model.fetch` to retrieve values. """ - from pyairtable.orm import Model # noqa, avoid circular import + from pyairtable.orm import Model if not ( model is _LinkFieldOptions.LinkSelf @@ -507,7 +833,7 @@ def __init__( @property def linked_model(self) -> Type[T_Linked]: """ - Resolves a :class:`~pyairtable.orm.Model` class based on + Resolve a :class:`~pyairtable.orm.Model` class based on the ``model=`` constructor parameter to this field instance. """ if isinstance(self._linked_model, str): @@ -535,56 +861,106 @@ def _repr_fields(self) -> List[Tuple[str, Any]]: ("lazy", self._lazy), ] - def _get_list_value(self, instance: "Model") -> List[T_Linked]: + def populate( + self, + instance: "Model", + *, + lazy: Optional[bool] = None, + memoize: Optional[bool] = None, + ) -> None: """ - Unlike most other field classes, LinkField does not store its internal - representation (T_ORM) in instance._fields after Model.from_record(). - Instead, we defer creating objects until they're requested for the first - time, so we can avoid infinite recursion during to_internal_value(). + Populates the field's value for the given instance. This allows you to + control how linked models are loaded, depending on your need, without + having to decide at the time of field or model construction. + + Args: + instance: An instance of this field's :class:`~pyairtable.orm.Model` class. + lazy: |kwarg_orm_lazy| + memoize: |kwarg_orm_memoize| + + Usage: + + .. code-block:: python + + from pyairtable.orm import Model, fields as F + + class Book(Model): + class Meta: ... + + class Author(Model): + class Meta: ... 
+ books = F.LinkField("Books", Book) + + author = Author.from_id("reculZ6qSLw0OCA61") + Author.books.populate(author, lazy=True, memoize=False) """ + if self._model and not isinstance(instance, self._model): + raise RuntimeError( + f"populate() got {type(instance)}; expected {self._model}" + ) + lazy = lazy if lazy is not None else self._lazy if not (records := super()._get_list_value(instance)): - return records + return # If there are any values which are IDs rather than instances, # retrieve their values in bulk, and store them keyed by ID # so we can maintain the order we received from the API. new_records = {} - if new_record_ids := [v for v in records if isinstance(v, RecordId)]: + if new_record_ids := [ + v for v in records[: self._max_retrieve] if isinstance(v, RecordId) + ]: new_records = { record.id: record for record in self.linked_model.from_ids( cast(List[RecordId], new_record_ids), - fetch=(not self._lazy), + memoize=memoize, + fetch=(not lazy), ) } # If the list contains record IDs, replace the contents with instances. # Other code may already have references to this specific list, so # we replace the existing list's values. - records[:] = [ - new_records[cast(RecordId, value)] if isinstance(value, RecordId) else value - for value in records - ] - return records + with records.disable_tracking(): + records[: self._max_retrieve] = [ + ( + new_records[cast(RecordId, value)] + if isinstance(value, RecordId) + else value + ) + for value in records[: self._max_retrieve] + ] + + def _get_list_value(self, instance: "Model") -> ChangeTrackingList[T_Linked]: + """ + Unlike most other field classes, LinkField does not store its internal + representation (T_ORM) in instance._fields after Model.from_record(). + They will first be stored as a list of IDs. + + We defer creating Model objects until they're requested for the first + time, so we can avoid infinite recursion during to_internal_value(). 
+ """ + self.populate(instance) + return super()._get_list_value(instance) - def to_record_value(self, value: Union[List[str], List[T_Linked]]) -> List[str]: + def to_record_value(self, value: List[Union[str, T_Linked]]) -> List[str]: """ - Returns the list of record IDs which should be persisted to the API. + Build the list of record IDs which should be persisted to the API. """ - if not value: - return [] # If the _fields value contains str, it means we loaded it from the API # but we never actually accessed the value (see _get_list_value). # When persisting this model back to the API, we can just write those IDs. if all(isinstance(v, str) for v in value): return cast(List[str], value) - # From here on, we assume we're dealing with models, not record IDs. - records = cast(List[T_Linked], value) + + # Validate any items in our list which are not record IDs + records = [v for v in value if not isinstance(v, str)] self.valid_or_raise(records) - # We could *try* to recursively save models that don't have an ID yet, - # but that requires us to second-guess the implementers' intentions. - # Better to just raise an exception. if not all(record.exists() for record in records): - raise ValueError(f"{self._description} contains an unsaved record") - return [record.id for record in records] + # We could *try* to recursively save models that don't have an ID yet, + # but that requires us to second-guess the implementers' intentions. + # Better to just raise an exception. + raise UnsavedRecordError(f"{self._description} contains an unsaved record") + + return [v if isinstance(v, str) else v.id for v in value] def valid_or_raise(self, value: Any) -> None: super().valid_or_raise(value) @@ -593,136 +969,257 @@ def valid_or_raise(self, value: Any) -> None: raise TypeError(f"expected {self.linked_model}; got {type(obj)}") -# Many of these are "passthrough" subclasses for now. E.g. there is no real -# difference between `field = TextField()` and `field = PhoneNumberField()`. 
-# -# But we might choose to add more type-specific functionality later, so -# we'll allow implementers to get as specific as they care to and they might -# get some extra functionality for free in the future. +class SingleLinkField( + Generic[T_Linked], + Field[List[str], T_Linked, None], + _FieldSchema[S.MultipleRecordLinksFieldSchema], +): + """ + Represents a MultipleRecordLinks field which we assume will only ever contain one link. + Returns and accepts a single instance of the linked model, which will be converted to/from + a list of IDs when communicating with the Airtable API. + See `Link to another record `__. -class AttachmentsField(_ValidatingListField[AttachmentDict]): - """ - Accepts a list of dicts in the format detailed in - `Attachments `_. - """ + .. warning:: - contains_type = cast(Type[AttachmentDict], dict) + If Airtable returns multiple IDs for a SingleLinkField and you modify the field value, + only the first ID will be saved to the API once you call ``.save()``. The other IDs will be lost. + By default, a SingleLinkField will ignore the 2nd...Nth IDs if it receives multiple IDs from the API. + This behavior can be overridden by passing ``raise_if_many=True`` to the constructor. -class AutoNumberField(IntegerField): - """ - Equivalent to :class:`IntegerField(readonly=True) `. + .. code-block:: python - See `Auto number `__. - """ + from pyairtable.orm import Model, fields as F - readonly = True + class Book(Model): + class Meta: ... + author = F.SingleLinkField("Author", Person) + editor = F.SingleLinkField("Editor", Person, raise_if_many=True) -class BarcodeField(_DictField[BarcodeDict]): - """ - Accepts a `dict` that should conform to the format detailed in the - `Barcode `_ - documentation. - """ + Given the model configuration above and the data below, + one field will silently return a single value, + while the other field will throw an exception. + .. code-block:: python + + >>> book = Book.from_record({ + ... 
"id": "recZ6qSLw0OCA61ul", + ... "createdTime": ..., + ... "fields": { + ... "Author": ["reculZ6qSLw0OCA61", "rec61ulZ6qSLw0OCA"], + ... "Editor": ["recLw0OCA61ulZ6qS", "recOCA61ulZ6qSLw0"], + ... } + ... }) + >>> book.author + + >>> book.editor + Traceback (most recent call last): + ... + MultipleValues: Book.editor got more than one linked record -class ButtonField(_DictField[ButtonDict]): - """ - Read-only field that returns a `dict`. For more information, read the - `Button `_ - documentation. """ - readonly = True + @utils.docstring_from( + LinkField.__init__, + append=( + " raise_if_many: If ``True``, this field will raise a" + " :class:`~pyairtable.orm.fields.MultipleValues` exception upon" + " being accessed if the underlying field contains multiple values." + ), + ) + def __init__( + self, + field_name: str, + model: Union[str, Literal[_LinkFieldOptions.LinkSelf], Type[T_Linked]], + validate_type: bool = True, + readonly: Optional[bool] = None, + lazy: bool = False, + raise_if_many: bool = False, + ): + super().__init__(field_name, validate_type=validate_type, readonly=readonly) + self._raise_if_many = raise_if_many + # composition is easier than inheritance in this case ¯\_(ツ)_/¯ + self._link_field = LinkField[T_Linked]( + field_name, + model, + validate_type=validate_type, + readonly=readonly, + lazy=lazy, + ) + self._link_field._max_retrieve = 1 + def _repr_fields(self) -> List[Tuple[str, Any]]: + return [ + ("model", self._link_field._linked_model), + ("validate_type", self.validate_type), + ("readonly", self.readonly), + ("lazy", self._link_field._lazy), + ("raise_if_many", self._raise_if_many), + ] -class CollaboratorField(_DictField[CollaboratorDict]): - """ - Accepts a `dict` that should conform to the format detailed in the - `Collaborator `_ - documentation. - """ + @overload + def __get__(self, instance: None, owner: Type[Any]) -> SelfType: ... + @overload + def __get__(self, instance: "Model", owner: Type[Any]) -> Optional[T_Linked]: ... 
-class CountField(IntegerField): - """ - Equivalent to :class:`IntegerField(readonly=True) `. + def __get__( + self, instance: Optional["Model"], owner: Type[Any] + ) -> Union[SelfType, Optional[T_Linked]]: + if not instance: + return self + if self._raise_if_many and len(instance._fields.get(self.field_name) or []) > 1: + raise MultipleValuesError( + f"{self._description} got more than one linked record" + ) + links = self._link_field.__get__(instance, owner) + try: + return links[0] + except IndexError: + return None - See `Count `__. - """ + def __set__(self, instance: "Model", value: Optional[T_Linked]) -> None: + values = None if value is None else [value] + self._link_field.__set__(instance, values) - readonly = True + def __set_name__(self, owner: Any, name: str) -> None: + super().__set_name__(owner, name) + self._link_field.__set_name__(owner, name) + def to_record_value(self, value: List[Union[str, T_Linked]]) -> List[str]: + return self._link_field.to_record_value(value) -class CreatedByField(CollaboratorField): - """ - Equivalent to :class:`CollaboratorField(readonly=True) `. + @utils.docstring_from(LinkField.populate) + def populate( + self, + instance: "Model", + *, + lazy: Optional[bool] = None, + memoize: Optional[bool] = None, + ) -> None: + self._link_field.populate(instance, lazy=lazy, memoize=memoize) - See `Created by `__. + @property + @utils.docstring_from(LinkField.linked_model) + def linked_model(self) -> Type[T_Linked]: + return self._link_field.linked_model + + +class _DictField(_BasicField[T]): + """ + Generic field type that stores a single dict. Not for use via API; + should be subclassed by concrete field types (below). """ - readonly = True + valid_types = dict -class CreatedTimeField(DatetimeField): +@_field_api_docstring("AI Text") +class AITextField(_DictField[AITextDict], _FieldSchema[S.AITextFieldSchema]): """ - Equivalent to :class:`DatetimeField(readonly=True) `. - - See `Created time `__. 
+    Read-only field that returns a ``dict``.
     """

     readonly = True


-class CurrencyField(NumberField):
+@_field_api_docstring("Attachments", "multipleattachment")
+class AttachmentsField(
+    _ListFieldBase[
+        AttachmentDict, Union[AttachmentDict, CreateAttachmentDict], AttachmentsList
+    ],
+    _FieldSchema[S.MultipleAttachmentsFieldSchema],
+    list_class=AttachmentsList,
+    contains_type=dict,
+):
     """
-    Equivalent to :class:`~NumberField`.
-
-    See `Currency `__.
+    Accepts a list of :class:`~pyairtable.api.types.AttachmentDict` or
+    :class:`~pyairtable.api.types.CreateAttachmentDict`.
     """


-class EmailField(TextField):
+@_field_api_docstring("Barcode")
+class BarcodeField(_DictField[BarcodeDict], _FieldSchema[S.BarcodeFieldSchema]):
     """
-    Equivalent to :class:`~TextField`.
-
-    See `Email `__.
+    Accepts a :class:`~pyairtable.api.types.BarcodeDict`.
     """


-class ExternalSyncSourceField(TextField):
+@_field_api_docstring("Button")
+@_required_value_docstring
+class ButtonField(
+    _DictField[ButtonDict],
+    _Requires[ButtonDict],
+    _FieldSchema[S.ButtonFieldSchema],
+):
     """
-    Equivalent to :class:`TextField(readonly=True) `.
-
-    See `Sync source `__.
+    Read-only field that returns a :class:`~pyairtable.api.types.ButtonDict`.
     """

     readonly = True


-class LastModifiedByField(CollaboratorField):
+@_field_api_docstring("Collaborator")
+class CollaboratorField(
+    _DictField[Union[CollaboratorDict, CollaboratorEmailDict]],
+    _FieldSchema[S.SingleCollaboratorFieldSchema],
+):
+    """
+    Accepts a :class:`~pyairtable.api.types.CollaboratorDict` or
+    :class:`~pyairtable.api.types.CollaboratorEmailDict`.
     """
-    Equivalent to :class:`CollaboratorField(readonly=True) `.

-    See `Last modified by `__.
+
+@_field_api_docstring("Created by")
+@_required_value_docstring
+class CreatedByField(
+    _DictField[CollaboratorDict],
+    _Requires[CollaboratorDict],
+    _FieldSchema[S.CreatedByFieldSchema],
+):
+    """
+    Returns a :class:`~pyairtable.api.types.CollaboratorDict`.
""" readonly = True -class LastModifiedTimeField(DatetimeField): - """ - Equivalent to :class:`DatetimeField(readonly=True) `. +@_field_api_docstring("Created time") +@_required_value_docstring +class CreatedTimeField( + _DatetimeField, + _Requires_API_ORM[str, datetime], + _FieldSchema[S.CreatedTimeFieldSchema], +): + + readonly = True - See `Last modified time `__. + +@_field_api_docstring("Last modified by") +@_required_value_docstring +class LastModifiedByField( + _DictField[CollaboratorDict], _FieldSchema[S.LastModifiedByFieldSchema] +): + """ + Read-only. Returns a :class:`~pyairtable.api.types.CollaboratorDict`. """ readonly = True -class LookupField(Generic[T], _ListField[T, T]): +@_field_api_docstring("Last modified time") +class LastModifiedTimeField( + _DatetimeField, _FieldSchema[S.LastModifiedTimeFieldSchema] +): + + readonly = True + + +@_field_api_docstring("Lookup") +class LookupField(_ListField[T], _FieldSchema[S.MultipleLookupValuesFieldSchema]): """ Generic field class for a lookup, which returns a list of values. @@ -737,76 +1234,159 @@ class LookupField(Generic[T], _ListField[T, T]): >>> rec = MyTable.first() >>> rec.lookup ["First value", "Second value", ...] - - See `Lookup `__. """ readonly = True -class MultipleCollaboratorsField(_ValidatingListField[CollaboratorDict]): +@_field_api_docstring("Multiple collaborators", "multicollaborator") +class MultipleCollaboratorsField( + _ListField[Union[CollaboratorDict, CollaboratorEmailDict]], + _FieldSchema[S.MultipleCollaboratorsFieldSchema], + contains_type=dict, +): """ - Accepts a list of dicts in the format detailed in - `Multiple Collaborators `_. + Accepts a list of :class:`~pyairtable.api.types.CollaboratorDict` or + :class:`~pyairtable.api.types.CollaboratorEmailDict`. 
""" - contains_type = cast(Type[CollaboratorDict], dict) - -class MultipleSelectField(_ValidatingListField[str]): +@_field_api_docstring("Multiple select", "multiselect") +class MultipleSelectField( + _ListField[str], _FieldSchema[S.MultipleSelectsFieldSchema], contains_type=str +): """ Accepts a list of ``str``. - - See `Multiple select `__. """ - contains_type = str +# ====================================================== +# Derived field types that disallow None or empty string +# ====================================================== -class PercentField(NumberField): - """ - Equivalent to :class:`~NumberField`. - See `Percent `__. - """ +@_field_api_docstring("Auto number", "autonumber") +@_required_value_docstring +class AutoNumberField( + _IntegerField, + _Requires[int], + _FieldSchema[S.AutoNumberFieldSchema], +): + readonly = True -class PhoneNumberField(TextField): - """ - Equivalent to :class:`~TextField`. +@_required_value_docstring +class RequiredAITextField(AITextField, _Requires[AITextDict]): + pass - See `Phone `__. - """ +@_required_value_docstring +class RequiredBarcodeField(BarcodeField, _Requires[BarcodeDict]): + pass -class RichTextField(TextField): - """ - Equivalent to :class:`~TextField`. - See `Rich text `__. - """ +@_required_value_docstring +class RequiredCollaboratorField( + CollaboratorField, + _Requires[Union[CollaboratorDict, CollaboratorEmailDict]], +): + pass -class SelectField(TextField): - """ - Equivalent to :class:`~TextField`. +@_required_value_docstring +class RequiredCountField(CountField, _Requires[int]): + pass - See `Single select `__. - """ +@_required_value_docstring +class RequiredCurrencyField(CurrencyField, _Requires[Union[int, float]]): + pass -class UrlField(TextField): - """ - Equivalent to :class:`~TextField`. - See `Url `__. 
- """ +@_required_value_docstring +class RequiredDateField(DateField, _Requires_API_ORM[str, date]): + pass + + +@_required_value_docstring +class RequiredDatetimeField(DatetimeField, _Requires_API_ORM[str, datetime]): + pass + + +@_required_value_docstring +class RequiredDurationField(DurationField, _Requires_API_ORM[int, timedelta]): + pass + + +@_required_value_docstring +class RequiredEmailField(EmailField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredFloatField(FloatField, _Requires[float]): + pass + + +@_required_value_docstring +class RequiredIntegerField(IntegerField, _Requires[int]): + pass + + +@_required_value_docstring +class RequiredNumberField(NumberField, _Requires[Union[int, float]]): + pass + + +@_required_value_docstring +class RequiredPercentField(PercentField, _Requires[Union[int, float]]): + pass + + +@_required_value_docstring +class RequiredPhoneNumberField(PhoneNumberField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredRatingField(RatingField, _Requires[int]): + pass + + +@_required_value_docstring +class RequiredRichTextField(RichTextField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredSelectField(SelectField, _Requires_API_ORM[str, str]): + pass + + +@_required_value_docstring +class RequiredTextField(TextField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredSingleLineTextField(SingleLineTextField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredMultilineTextField(MultilineTextField, _Requires[str]): + pass + + +@_required_value_docstring +class RequiredUrlField(UrlField, _Requires[str]): + pass #: Set of all Field subclasses exposed by the library. 
#: #: :meta hide-value: -ALL_FIELDS = { +ALL_FIELDS: Set[Type[AnyField]] = { field_class for name, field_class in vars().items() if isinstance(field_class, type) @@ -819,59 +1399,156 @@ class UrlField(TextField): #: Set of all read-only Field subclasses exposed by the library. #: #: :meta hide-value: -READONLY_FIELDS = {cls for cls in ALL_FIELDS if cls.readonly} +READONLY_FIELDS: Set[Type[AnyField]] = {cls for cls in ALL_FIELDS if cls.readonly} #: Mapping of Airtable field type names to their ORM classes. #: See https://airtable.com/developers/web/api/field-model -#: and :ref:`Formulas, Rollups, and Lookups`. +#: and :ref:`Formula, Rollup, and Lookup Fields`. #: #: The data type of "formula" and "rollup" fields will depend #: on the underlying fields they reference, so it is not practical #: for the ORM to know or detect those fields' types. These two #: field type names are mapped to the constant ``NotImplemented``. #: +#: Keys are :class:`~pyairtable.models.schema.FieldType` enum values, +#: which inherit from ``str`` and can be used in string comparisons. 
+#: #: :meta hide-value: -FIELD_TYPES_TO_CLASSES = { - "autoNumber": AutoNumberField, - "barcode": BarcodeField, - "button": ButtonField, - "checkbox": CheckboxField, - "count": CountField, - "createdBy": CreatedByField, - "createdTime": CreatedTimeField, - "currency": CurrencyField, - "date": DateField, - "dateTime": DatetimeField, - "duration": DurationField, - "email": EmailField, - "externalSyncSource": ExternalSyncSourceField, - "formula": NotImplemented, - "lastModifiedBy": LastModifiedByField, - "lastModifiedTime": LastModifiedTimeField, - "lookup": LookupField, - "multilineText": TextField, - "multipleAttachments": AttachmentsField, - "multipleCollaborators": MultipleCollaboratorsField, - "multipleRecordLinks": LinkField, - "multipleSelects": MultipleSelectField, - "number": NumberField, - "percent": PercentField, - "phoneNumber": PhoneNumberField, - "rating": RatingField, - "richText": RichTextField, - "rollup": NotImplemented, - "singleCollaborator": CollaboratorField, - "singleLineText": TextField, - "singleSelect": SelectField, - "url": UrlField, +FIELD_TYPES_TO_CLASSES: Dict[str, Type[AnyField]] = { + FieldType.AI_TEXT: AITextField, + FieldType.AUTO_NUMBER: AutoNumberField, + FieldType.BARCODE: BarcodeField, + FieldType.BUTTON: ButtonField, + FieldType.CHECKBOX: CheckboxField, + FieldType.COUNT: CountField, + FieldType.CREATED_BY: CreatedByField, + FieldType.CREATED_TIME: CreatedTimeField, + FieldType.CURRENCY: CurrencyField, + FieldType.DATE: DateField, + FieldType.DATE_TIME: DatetimeField, + FieldType.DURATION: DurationField, + FieldType.EMAIL: EmailField, + FieldType.EXTERNAL_SYNC_SOURCE: ExternalSyncSourceField, + FieldType.FORMULA: NotImplemented, + FieldType.LAST_MODIFIED_BY: LastModifiedByField, + FieldType.LAST_MODIFIED_TIME: LastModifiedTimeField, + "lookup": LookupField, # Deprecated alias for multipleLookupValues + FieldType.MANUAL_SORT: ManualSortField, + FieldType.MULTILINE_TEXT: TextField, + FieldType.MULTIPLE_ATTACHMENTS: 
AttachmentsField, + FieldType.MULTIPLE_COLLABORATORS: MultipleCollaboratorsField, + FieldType.MULTIPLE_LOOKUP_VALUES: LookupField, + FieldType.MULTIPLE_RECORD_LINKS: LinkField, + FieldType.MULTIPLE_SELECTS: MultipleSelectField, + FieldType.NUMBER: NumberField, + FieldType.PERCENT: PercentField, + FieldType.PHONE_NUMBER: PhoneNumberField, + FieldType.RATING: RatingField, + FieldType.RICH_TEXT: RichTextField, + FieldType.ROLLUP: NotImplemented, + FieldType.SINGLE_COLLABORATOR: CollaboratorField, + FieldType.SINGLE_LINE_TEXT: TextField, + FieldType.SINGLE_SELECT: SelectField, + FieldType.URL: UrlField, } #: Mapping of field classes to the set of supported Airtable field types. #: #: :meta hide-value: -FIELD_CLASSES_TO_TYPES = { +FIELD_CLASSES_TO_TYPES: Dict[Type[AnyField], Set[str]] = { cls: {key for (key, val) in FIELD_TYPES_TO_CLASSES.items() if val == cls} for cls in ALL_FIELDS } + + +# Auto-generate __all__ to explicitly exclude any imported values +# +# [[[cog]]] +# import re +# +# with open(cog.inFile) as fp: +# src = fp.read() +# +# classes = re.findall(r"^class ((?:[A-Z]\w+)?Field)\b", src, re.MULTILINE) +# constants = re.findall(r"^(?!T_)([A-Z][A-Z_]+)(?:: [^=]+)? 
= ", src, re.MULTILINE) +# aliases = re.findall(r"^(\w+): TypeAlias\b", src, re.MULTILINE) +# extras = ["LinkSelf"] +# names = constants + sorted(classes + aliases + extras) +# +# cog.outl("\n\n__all__ = [") +# for name in names: +# if not name.startswith("_"): +# cog.outl(f' "{name}",') +# cog.outl("]") +# [[[out]]] + + +__all__ = [ + "ALL_FIELDS", + "READONLY_FIELDS", + "FIELD_TYPES_TO_CLASSES", + "FIELD_CLASSES_TO_TYPES", + "AITextField", + "AnyField", + "AttachmentsField", + "AutoNumberField", + "BarcodeField", + "ButtonField", + "CheckboxField", + "CollaboratorField", + "CountField", + "CreatedByField", + "CreatedTimeField", + "CurrencyField", + "DateField", + "DatetimeField", + "DurationField", + "EmailField", + "ExternalSyncSourceField", + "Field", + "FloatField", + "IntegerField", + "LastModifiedByField", + "LastModifiedTimeField", + "LinkField", + "LinkSelf", + "LookupField", + "ManualSortField", + "MultilineTextField", + "MultipleCollaboratorsField", + "MultipleSelectField", + "NumberField", + "PercentField", + "PhoneNumberField", + "RatingField", + "RequiredAITextField", + "RequiredBarcodeField", + "RequiredCollaboratorField", + "RequiredCountField", + "RequiredCurrencyField", + "RequiredDateField", + "RequiredDatetimeField", + "RequiredDurationField", + "RequiredEmailField", + "RequiredFloatField", + "RequiredIntegerField", + "RequiredMultilineTextField", + "RequiredNumberField", + "RequiredPercentField", + "RequiredPhoneNumberField", + "RequiredRatingField", + "RequiredRichTextField", + "RequiredSelectField", + "RequiredSingleLineTextField", + "RequiredTextField", + "RequiredUrlField", + "RichTextField", + "SelectField", + "SingleLineTextField", + "SingleLinkField", + "TextField", + "UrlField", +] +# [[[end]]] (sum: Wo7K0mAxiV) diff --git a/pyairtable/orm/generate.py b/pyairtable/orm/generate.py new file mode 100644 index 00000000..3d6d0673 --- /dev/null +++ b/pyairtable/orm/generate.py @@ -0,0 +1,235 @@ +""" +pyAirtable can generate ORM models that 
reflect the schema of an Airtable base. + +The simplest way to use this functionality is with the command line utility: + +.. code-block:: + + % pip install 'pyairtable[cli]' + % pyairtable base YOUR_BASE_ID orm > your_models.py +""" + +import re +from dataclasses import dataclass +from functools import cached_property +from typing import Any, Dict, List, Optional, Sequence, Type + +import inflection + +from pyairtable.api.base import Base +from pyairtable.api.table import Table +from pyairtable.models import schema as S +from pyairtable.models.schema import FieldType +from pyairtable.orm import fields + +_ANNOTATION_IMPORTS = { + "date": "from datetime import date", + "datetime": "from datetime import datetime", + "timedelta": "from datetime import timedelta", + "Any": "from typing import Any", + r"Union\[.+\]": "from typing import Union", +} + + +class ModelFileBuilder: + """ + Produces the code for a Python module file that contains ORM classes + representing all tables in the given base. + """ + + def __init__( + self, + base: Base, + table_ids: Optional[Sequence[str]] = None, + table_names: Optional[Sequence[str]] = None, + ): + """ + Args: + base: The base to use when inspecting table schemas. + table_ids: An optional list of table IDs to limit the output. + table_names: An optional list of table names to limit the output. 
+ """ + table_ids = table_ids or [] + table_names = table_names or [] + tables = base.tables() + if table_names or table_ids: + tables = [t for t in tables if t.name in table_names or t.id in table_ids] + self.model_builders = [ModelBuilder(self, table) for table in tables] + + @cached_property + def model_lookup(self) -> Dict[str, "ModelBuilder"]: + return { + key: builder + for builder in self.model_builders + for key in (builder.table.id, builder.table.name) + } + + def __str__(self) -> str: + models_expr = "\n\n\n".join(str(builder) for builder in self.model_builders) + import_exprs = [ + "import os", + "from functools import partial", + *( + import_text + for import_expr, import_text in _ANNOTATION_IMPORTS.items() + if re.search(rf"\[{import_expr}\]", models_expr) + ), + ] + preamble = "\n".join( + [ + "from __future__ import annotations", + "", + *(line for line in import_exprs if line), + "", + "from pyairtable.orm import Model", + "from pyairtable.orm import fields as F", + ] + ) + all_expr = "\n".join( + [ + "__all__ = [", + *sorted(f" {b.class_name!r}," for b in self.model_builders), + "]", + ] + ) + return "\n\n\n".join([preamble, models_expr, all_expr]) + + +@dataclass +class ModelBuilder: + file_generator: ModelFileBuilder + table: Table + meta_envvar: str = "AIRTABLE_API_KEY" + + @property + def field_builders(self) -> List["FieldBuilder"]: + return [ + FieldBuilder(field_schema, lookup=self.file_generator.model_lookup) + for field_schema in self.table.schema().fields + ] + + @property + def class_name(self) -> str: + return table_class_name(self.table.schema().name) + + def __str__(self) -> str: + return "\n".join( + [ + f"class {self.class_name}(Model):", + " class Meta:", + f" api_key = partial(os.environ.get, {self.meta_envvar!r})", + f" base_id = {self.table.base.id!r}", + f" table_name = {self.table.schema().name!r}", + "", + *(f" {fg}" for fg in self.field_builders), + ] + ) + + +@dataclass +class FieldBuilder: + schema: S.FieldSchema + lookup: 
Dict[str, ModelBuilder] + + @property + def var_name(self) -> str: + return field_variable_name(self.schema.name) + + @property + def field_class(self) -> Type[fields.AnyField]: + field_type = self.schema.type + if isinstance(self.schema, (S.FormulaFieldSchema, S.RollupFieldSchema)): + if self.schema.options.result: + field_type = self.schema.options.result.type + if isinstance(self.schema, S.MultipleRecordLinksFieldSchema): + try: + self.lookup[self.schema.options.linked_table_id] + except KeyError: + return fields._ListField + return fields.FIELD_TYPES_TO_CLASSES[field_type] + + def __str__(self) -> str: + args: List[Any] = [self.schema.name] + kwargs: Dict[str, Any] = {} + generic = "" + cls = self.field_class + + if isinstance(self.schema, S.MultipleLookupValuesFieldSchema): + generic = lookup_field_type_annotation(self.schema) + + if cls is fields.LinkField: + assert isinstance(self.schema, S.MultipleRecordLinksFieldSchema) + linked_model = self.lookup[self.schema.options.linked_table_id] + kwargs["model"] = linked_model.class_name + generic = repr(linked_model.class_name) + + if cls is fields._ListField: + generic = "str" + + if self.schema.type in (FieldType.FORMULA, FieldType.ROLLUP): + assert isinstance(self.schema, (S.FormulaFieldSchema, S.RollupFieldSchema)) + cls = fields.Field + if self.schema.options.result: + cls = fields.FIELD_TYPES_TO_CLASSES[self.schema.options.result.type] + kwargs["readonly"] = True + + generic = generic and f"[{generic}]" + args_repr = [repr(arg) for arg in args] + args_repr.extend(f"{k}={v!r}" for (k, v) in kwargs.items()) + args_join = ", ".join(args_repr) + return f"{self.var_name} = F.{cls.__name__}{generic}({args_join})" + + +def table_class_name(table_name: str) -> str: + """ + Convert an Airtable table name into a Python class name. 
+ """ + name = inflection.singularize(table_name) + name = re.sub(r"[^a-zA-Z0-9]+", " ", name).strip() + name = re.sub(r"([0-9]) +([0-9])", r"\1_\2", name) + name = re.sub(r"^([0-9])", r"_\1", name) + return "".join(part.capitalize() for part in name.split()) + + +def field_variable_name(field_name: str) -> str: + """ + Convert an Airtable field name into a Python variable name. + """ + name = re.sub(r"[^a-zA-Z0-9]+", " ", field_name) + name = name.strip().lower().replace(" ", "_") + name = re.sub(r"([0-9]) +([0-9])", r"\1_\2", name) + name = re.sub(r"^([0-9])", r"_\1", name) + return name + + +def lookup_field_type_annotation(schema: S.MultipleLookupValuesFieldSchema) -> str: + """ + Given the schema for a multipleLookupValues field, determine the type annotation + we should use when creating the field descriptor. + """ + if not schema.options.result: + return "Any" + lookup_type = schema.options.result.type + if lookup_type == FieldType.MULTIPLE_RECORD_LINKS: + return "str" # otherwise this will be 'list' + cls = fields.FIELD_TYPES_TO_CLASSES[lookup_type] + if isinstance(contained_type := getattr(cls, "contains_type", None), type): + return contained_type.__name__ + valid_types = _flatten(cls.valid_types) + if len(valid_types) == 1: + return valid_types[0].__name__ + return "Union[%s]" % ", ".join(t.__name__ for t in _flatten(cls.valid_types)) + + +def _flatten(class_info: fields._ClassInfo) -> List[Type[Any]]: + """ + Given a _ClassInfo tuple (which can contain multiple levels of nested tuples) + return a single list of all the actual types contained. 
+ """ + if isinstance(class_info, type): + return [class_info] + flattened = [t for t in class_info if isinstance(t, type)] + for t in class_info: + if isinstance(t, tuple): + flattened.extend(_flatten(t)) # pragma: no cover + return flattened diff --git a/pyairtable/orm/lists.py b/pyairtable/orm/lists.py new file mode 100644 index 00000000..31fcb344 --- /dev/null +++ b/pyairtable/orm/lists.py @@ -0,0 +1,146 @@ +from contextlib import contextmanager +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Iterable, + Iterator, + List, + Optional, + SupportsIndex, + Union, + overload, +) + +from typing_extensions import Self, TypeVar + +from pyairtable.api.types import AttachmentDict, CreateAttachmentDict +from pyairtable.exceptions import ReadonlyFieldError, UnsavedRecordError + +T = TypeVar("T") + + +if TYPE_CHECKING: + # These would be circular imports if not for the TYPE_CHECKING condition. + from pyairtable.orm.fields import AnyField + from pyairtable.orm.model import Model + + +class ChangeTrackingList(List[T]): + """ + A list that keeps track of when its contents are modified. This allows us to know + if any mutations happened to the lists returned from linked record fields. + """ + + def __init__(self, *args: Iterable[T], field: "AnyField", model: "Model") -> None: + super().__init__(*args) + self._field = field + self._model = model + self._tracking_enabled = True + + @contextmanager + def disable_tracking(self) -> Iterator[Self]: + """ + Temporarily disable change tracking. + """ + prev = self._tracking_enabled + self._tracking_enabled = False + try: + yield self + finally: + self._tracking_enabled = prev + + def _on_change(self) -> None: + try: + if not self._tracking_enabled: + return + except AttributeError: + # This means we're being unpickled and won't call __init__. + return + self._model._changed[self._field.field_name] = True + + @overload + def __setitem__(self, index: SupportsIndex, value: T, /) -> None: ... 
+ + @overload + def __setitem__(self, key: slice, value: Iterable[T], /) -> None: ... + + def __setitem__( + self, + index: Union[SupportsIndex, slice], + value: Union[T, Iterable[T]], + /, + ) -> None: + self._on_change() + return super().__setitem__(index, value) # type: ignore + + def __delitem__(self, key: Union[SupportsIndex, slice]) -> None: + self._on_change() + return super().__delitem__(key) + + def append(self, object: T) -> None: + self._on_change() + return super().append(object) + + def insert(self, index: SupportsIndex, object: T) -> None: + self._on_change() + return super().insert(index, object) + + def remove(self, value: T) -> None: + self._on_change() + return super().remove(value) + + def clear(self) -> None: + self._on_change() + return super().clear() + + def extend(self, iterable: Iterable[T]) -> None: + self._on_change() + return super().extend(iterable) + + def pop(self, index: SupportsIndex = -1) -> T: + self._on_change() + return super().pop(index) + + +class AttachmentsList(ChangeTrackingList[Union[AttachmentDict, CreateAttachmentDict]]): + def upload( + self, + filename: Union[str, Path], + content: Optional[Union[str, bytes]] = None, + content_type: Optional[str] = None, + ) -> None: + """ + Upload an attachment to the Airtable API and refresh the field's values. + + This method will replace the current list with the response from the server, + which will contain a list of :class:`~pyairtable.api.types.AttachmentDict` for + all attachments in the field (not just the ones uploaded). + + You do not need to call :meth:`~pyairtable.orm.Model.save`; the new attachment + will be saved immediately. Note that this means any other unsaved changes to + this field will be lost. + + Example: + >>> model.attachments.upload("example.jpg", b"...", "image/jpeg") + >>> model.attachments[-1]["filename"] + 'example.jpg' + >>> model.attachments[-1]["url"] + 'https://v5.airtableusercontent.com/...' 
+ """ + if not self._model.id: + raise UnsavedRecordError("cannot upload attachments to an unsaved record") + if self._field.readonly: + raise ReadonlyFieldError("cannot upload attachments to a readonly field") + response = self._model.meta.table.upload_attachment( + self._model.id, + self._field.field_name, + filename=filename, + content=content, + content_type=content_type, + ) + attachments = list(response["fields"].values()).pop(0) + with self.disable_tracking(): + self.clear() + # We only ever expect one key: value in `response["fields"]`. + # See https://airtable.com/developers/web/api/upload-attachment + self.extend(attachments) diff --git a/pyairtable/orm/model.py b/pyairtable/orm/model.py index 97897382..06d1d56a 100644 --- a/pyairtable/orm/model.py +++ b/pyairtable/orm/model.py @@ -1,9 +1,27 @@ -from functools import lru_cache -from typing import Any, Dict, Iterable, List, Optional +import dataclasses +import datetime +import warnings +from dataclasses import dataclass +from functools import cached_property +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Iterable, + List, + Mapping, + Optional, + Set, + Type, + Union, + cast, +) from typing_extensions import Self as SelfType -from pyairtable.api.api import Api +from pyairtable.api import retrying +from pyairtable.api.api import Api, TimeoutTuple from pyairtable.api.base import Base from pyairtable.api.table import Table from pyairtable.api.types import ( @@ -13,9 +31,13 @@ UpdateRecordDict, WritableFields, ) -from pyairtable.formulas import OR, STR_VALUE +from pyairtable.formulas import EQ, OR, RECORD_ID from pyairtable.models import Comment from pyairtable.orm.fields import AnyField, Field +from pyairtable.utils import datetime_from_iso_str, datetime_to_iso_str + +if TYPE_CHECKING: + from builtins import _ClassInfo class Model: @@ -23,7 +45,7 @@ class Model: Supports creating ORM-style classes representing Airtable tables. For more details, see :ref:`orm`. 
- A nested class called ``Meta`` is required and can specify + A nested class or dict called ``Meta`` is required and can specify the following attributes: * ``api_key`` (required) - API key or personal access token. @@ -31,15 +53,21 @@ class Model: * ``table_name`` (required) - Table ID or name. * ``timeout`` - A tuple indicating a connect and read timeout. Defaults to no timeout. * ``typecast`` - |kwarg_typecast| Defaults to ``True``. + * ``retry`` - An instance of `urllib3.util.Retry `_. + If ``None`` or ``False``, requests will not be retried. + If ``True``, the default strategy will be applied + (see :func:`~pyairtable.retry_strategy` for details). + * ``use_field_ids`` - Whether fields will be defined by ID, rather than name. Defaults to ``False``. + * ``memoize`` - Whether the model should reuse models it creates between requests. + See :ref:`Memoizing linked records` for more information. + + For example, the following two are equivalent: .. code-block:: python from pyairtable.orm import Model, fields class Contact(Model): - first_name = fields.TextField("First Name") - age = fields.IntegerField("Age") - class Meta: base_id = "appaPqizdsNHDvlEm" table_name = "Contact" @@ -47,18 +75,33 @@ class Meta: timeout = (5, 5) typecast = True - You can implement meta attributes as callables if certain values - need to be dynamically provided or are unavailable at import time: + first_name = fields.TextField("First Name") + age = fields.IntegerField("Age") .. code-block:: python from pyairtable.orm import Model, fields - from your_app.config import get_secret class Contact(Model): + Meta = { + "base_id": "appaPqizdsNHDvlEm", + "table_name": "Contact", + "api_key": "keyapikey", + "timeout": (5, 5), + "typecast": True, + } first_name = fields.TextField("First Name") age = fields.IntegerField("Age") + You can implement meta attributes as callables if certain values + need to be dynamically provided or are unavailable at import time: + + .. 
code-block:: python + + from pyairtable.orm import Model, fields + from your_app.config import get_secret + + class Contact(Model): class Meta: base_id = "appaPqizdsNHDvlEm" table_name = "Contact" @@ -66,26 +109,58 @@ class Meta: @staticmethod def api_key(): return get_secret("AIRTABLE_API_KEY") + + first_name = fields.TextField("First Name") + age = fields.IntegerField("Age") """ + #: The Airtable record ID for this instance. If empty, the instance + #: has never been saved to the API. id: str = "" - created_time: str = "" + + #: The time when the Airtable record was created. If empty, the instance + #: has never been saved to (or fetched from) the API. + created_time: Optional[datetime.datetime] = None + + #: The number of comments on this record. Only populated if the record was + #: fetched with ``count_comments=True``. + comment_count: Optional[int] = None + + #: A wrapper allowing type-annotated access to ORM configuration. + meta: ClassVar["_Meta"] + _deleted: bool = False + _fetched: bool = False _fields: Dict[FieldName, Any] + _changed: Dict[FieldName, bool] + _memoized: ClassVar[Dict[RecordId, SelfType]] def __init_subclass__(cls, **kwargs: Any): + cls.meta = _Meta(cls) + cls._memoized = {} cls._validate_class() super().__init_subclass__(**kwargs) - def __repr__(self) -> str: - if not self.id: - return f"" - return f"<{self.__class__.__name__} id={self.id!r}>" + @classmethod + def _validate_class(cls) -> None: + # Verify required Meta attributes were set (but don't call any callables) + assert cls.meta.get("api_key", required=True, call=False) + assert cls.meta.get("base_id", required=True, call=False) + assert cls.meta.get("table_name", required=True, call=False) + + model_attributes = [a for a in cls.__dict__.keys() if not a.startswith("__")] + overridden = set(model_attributes).intersection(Model.__dict__.keys()) + if overridden: + raise ValueError( + "Class {cls} fields clash with existing method: {name}".format( + cls=cls.__name__, name=overridden + 
) + ) @classmethod def _attribute_descriptor_map(cls) -> Dict[str, AnyField]: """ - Returns a dictionary mapping the model's attribute names to the field's + Build a mapping of the model's attribute names to field descriptor instances. >>> class Test(Model): ... first_name = TextField("First Name") @@ -102,7 +177,7 @@ def _attribute_descriptor_map(cls) -> Dict[str, AnyField]: @classmethod def _field_name_descriptor_map(cls) -> Dict[FieldName, AnyField]: """ - Returns a dictionary that maps field names to descriptor instances. + Build a mapping of the model's field names to field descriptor instances. >>> class Test(Model): ... first_name = TextField("First Name") @@ -116,26 +191,9 @@ def _field_name_descriptor_map(cls) -> Dict[FieldName, AnyField]: """ return {f.field_name: f for f in cls._attribute_descriptor_map().values()} - @classmethod - def _field_name_attribute_map(cls) -> Dict[FieldName, str]: - """ - Returns a dictionary that maps field names to attribute names. - - >>> class Test(Model): - ... first_name = TextField("First Name") - ... age = NumberField("Age") - ... - >>> Test._field_name_attribute_map() - >>> { - ... "First Name": "first_name" - ... "Age": "age" - ... } - """ - return {v.field_name: k for k, v in cls._attribute_descriptor_map().items()} - def __init__(self, **fields: Any): """ - Constructs a model instance with field values based on the given keyword args. + Construct a model instance with field values based on the given keyword args. 
>>> Contact(name="Alice", birthday=date(1980, 1, 1)) @@ -146,8 +204,10 @@ def __init__(self, **fields: Any): """ - if "id" in fields: + try: self.id = fields.pop("id") + except KeyError: + pass # Field values in internal (not API) representation self._fields = {} @@ -158,54 +218,13 @@ def __init__(self, **fields: Any): raise AttributeError(key) setattr(self, key, value) - @classmethod - def _get_meta(cls, name: str, default: Any = None, required: bool = False) -> Any: - if not hasattr(cls, "Meta"): - raise AttributeError(f"{cls.__name__}.Meta must be defined") - if required and not hasattr(cls.Meta, name): - raise ValueError(f"{cls.__name__}.Meta.{name} must be defined") - value = getattr(cls.Meta, name, default) - if callable(value): - value = value() - if required and value is None: - raise ValueError(f"{cls.__name__}.Meta.{name} cannot be None") - return value - - @classmethod - def _validate_class(cls) -> None: - # Verify required Meta attributes were set - assert cls._get_meta("api_key", required=True) - assert cls._get_meta("base_id", required=True) - assert cls._get_meta("table_name", required=True) - - model_attributes = [a for a in cls.__dict__.keys() if not a.startswith("__")] - overridden = set(model_attributes).intersection(Model.__dict__.keys()) - if overridden: - raise ValueError( - "Class {cls} fields clash with existing method: {name}".format( - cls=cls.__name__, name=overridden - ) - ) + # Only start tracking changes after the object is created + self._changed = {} - @classmethod - @lru_cache - def get_api(cls) -> Api: - return Api( - api_key=cls._get_meta("api_key"), - timeout=cls._get_meta("timeout"), - ) - - @classmethod - def get_base(cls) -> Base: - return cls.get_api().base(cls._get_meta("base_id")) - - @classmethod - def get_table(cls) -> Table: - return cls.get_base().table(cls._get_meta("table_name")) - - @classmethod - def _typecast(cls) -> bool: - return bool(cls._get_meta("typecast", default=True)) + def __repr__(self) -> str: + if not 
self.id: + return f"" + return f"<{self.__class__.__name__} id={self.id!r}>" def exists(self) -> bool: """ @@ -213,69 +232,112 @@ def exists(self) -> bool: """ return bool(self.id) - def save(self) -> bool: + def save(self, *, force: bool = False) -> "SaveResult": """ - Saves or updates a model. + Save the model to the API. If the instance does not exist already, it will be created; - otherwise, the existing record will be updated. + otherwise, the existing record will be updated, using only the + fields which have been modified since it was retrieved. - Returns ``True`` if a record was created and ``False`` if it was updated. + Args: + force: If ``True``, all fields will be saved, even if they have not changed. """ if self._deleted: raise RuntimeError(f"{self.id} was deleted") - table = self.get_table() - fields = self.to_record(only_writable=True)["fields"] - if not self.id: - record = table.create(fields, typecast=self._typecast()) - did_create = True - else: - record = table.update(self.id, fields, typecast=self._typecast()) - did_create = False + field_values = self.to_record(only_writable=True)["fields"] - self.id = record["id"] - self.created_time = record["createdTime"] - return did_create + if not self.id: + record = self.meta.table.create( + field_values, + typecast=self.meta.typecast, + use_field_ids=self.meta.use_field_ids, + ) + self.id = record["id"] + self.created_time = datetime_from_iso_str(record["createdTime"]) + self._changed.clear() + return SaveResult(self.id, created=True, field_names=set(field_values)) + + if not force: + if not self._changed: + return SaveResult(self.id) + field_values = { + field_name: value + for field_name, value in field_values.items() + if self._changed.get(field_name) + } + + self.meta.table.update( + self.id, + field_values, + typecast=self.meta.typecast, + use_field_ids=self.meta.use_field_ids, + ) + self._changed.clear() + return SaveResult( + self.id, forced=force, updated=True, field_names=set(field_values) + ) 
def delete(self) -> bool: """ - Deletes the record. + Delete the record. Raises: ValueError: if the record does not exist. """ if not self.id: raise ValueError("cannot be deleted because it does not have id") - table = self.get_table() + table = self.meta.table result = table.delete(self.id) self._deleted = True # Is it even possible to get "deleted" False? return bool(result["deleted"]) @classmethod - def all(cls, **kwargs: Any) -> List[SelfType]: + def all(cls, *, memoize: Optional[bool] = None, **kwargs: Any) -> List[SelfType]: """ - Returns all records for this model. For all supported + Retrieve all records for this model. For all supported keyword arguments, see :meth:`Table.all `. + + Args: + memoize: |kwarg_orm_memoize| """ - table = cls.get_table() - return [cls.from_record(record) for record in table.all(**kwargs)] + kwargs.update(cls.meta.request_kwargs) + return [ + cls.from_record(record, memoize=memoize) + for record in cls.meta.table.all(**kwargs) + ] @classmethod - def first(cls, **kwargs: Any) -> Optional[SelfType]: + def first( + cls, *, memoize: Optional[bool] = None, **kwargs: Any + ) -> Optional[SelfType]: """ - Returns the first record for this model. For all supported + Retrieve the first record for this model. For all supported keyword arguments, see :meth:`Table.first `. + + Args: + memoize: |kwarg_orm_memoize| """ - table = cls.get_table() - if record := table.first(**kwargs): - return cls.from_record(record) + kwargs.update(cls.meta.request_kwargs) + if record := cls.meta.table.first(**kwargs): + return cls.from_record(record, memoize=memoize) return None + @classmethod + def _maybe_memoize(cls, instance: SelfType, memoize: Optional[bool]) -> None: + """ + If memoization is enabled, save the instance to the memoization cache. 
+ """ + memoize = cls.meta.memoize if memoize is None else memoize + if memoize: + cls._memoized[instance.id] = instance + def to_record(self, only_writable: bool = False) -> RecordDict: """ - Returns a dictionary object as an Airtable record. + Build a :class:`~pyairtable.api.types.RecordDict` to represent this instance. + This method converts internal field values into values expected by Airtable. For example, a ``datetime`` value from :class:`~pyairtable.orm.fields.DatetimeField` is converted into an ISO 8601 string. @@ -290,18 +352,29 @@ def to_record(self, only_writable: bool = False) -> RecordDict: for field, value in self._fields.items() if not (map_[field].readonly and only_writable) } - return {"id": self.id, "createdTime": self.created_time, "fields": fields} + ct = datetime_to_iso_str(self.created_time) if self.created_time else "" + return {"id": self.id, "createdTime": ct, "fields": fields} @classmethod - def from_record(cls, record: RecordDict) -> SelfType: + def from_record( + cls, record: RecordDict, *, memoize: Optional[bool] = None + ) -> SelfType: """ Create an instance from a record dict. + + Args: + record: The record data from the Airtable API. + memoize: |kwarg_orm_memoize| """ name_field_map = cls._field_name_descriptor_map() # Convert Column Names into model field names field_values = { # Use field's to_internal_value to cast into model fields - field: name_field_map[field].to_internal_value(value) + field: ( + name_field_map[field].to_internal_value(value) + if value is not None + else None + ) for (field, value) in record["fields"].items() # Silently proceed if Airtable returns fields we don't recognize if field in name_field_map @@ -310,46 +383,58 @@ def from_record(cls, record: RecordDict) -> SelfType: # any readonly fields, instead we directly set instance._fields. 
instance = cls(id=record["id"]) instance._fields = field_values - instance.created_time = record["createdTime"] + instance._fetched = True + instance.created_time = datetime_from_iso_str(record["createdTime"]) + instance.comment_count = record.get("commentCount") + cls._maybe_memoize(instance, memoize) return instance @classmethod def from_id( cls, record_id: RecordId, + *, fetch: bool = True, + memoize: Optional[bool] = None, ) -> SelfType: """ Create an instance from a record ID. Args: record_id: |arg_record_id| - fetch: If ``True``, record will be fetched and field values will be - updated. If ``False``, a new instance is created with the provided ID, - but field values are unset. - """ - instance = cls(id=record_id) - if fetch: + fetch: |kwarg_orm_fetch| + memoize: |kwarg_orm_memoize| + """ + try: + instance = cast(SelfType, cls._memoized[record_id]) # type: ignore[redundant-cast] + except KeyError: + instance = cls(id=record_id) + if fetch and not instance._fetched: instance.fetch() + cls._maybe_memoize(instance, memoize) return instance def fetch(self) -> None: """ - Fetches field values from the API and resets instance field values. + Fetch field values from the API and resets instance field values. """ if not self.id: raise ValueError("cannot be fetched because instance does not have an id") - record = self.get_table().get(self.id) - unused = self.from_record(record) + record = self.meta.table.get(self.id, **self.meta.request_kwargs) + unused = self.from_record(record, memoize=False) self._fields = unused._fields + self._changed.clear() + self._fetched = True self.created_time = unused.created_time @classmethod def from_ids( cls, record_ids: Iterable[RecordId], + *, fetch: bool = True, + memoize: Optional[bool] = None, ) -> List[SelfType]: """ Create a list of instances from record IDs. 
If any record IDs returned @@ -358,27 +443,36 @@ def from_ids( Args: record_ids: |arg_record_id| - fetch: If ``True``, records will be fetched and field values will be - updated. If ``False``, new instances are created with the provided IDs, - but field values are unset. + fetch: |kwarg_orm_fetch| + memoize: |kwarg_orm_memoize| """ - record_ids = list(record_ids) if not fetch: return [cls.from_id(record_id, fetch=False) for record_id in record_ids] - formula = OR( - *[f"RECORD_ID()={STR_VALUE(record_id)}" for record_id in record_ids] - ) - records = [ - cls.from_record(record) for record in cls.get_table().all(formula=formula) - ] - records_by_id = {record.id: record for record in records} + + record_ids = list(record_ids) + by_id: Dict[RecordId, SelfType] = {} + + if cls._memoized: + for record_id in record_ids: + try: + by_id[record_id] = cast(SelfType, cls._memoized[record_id]) # type: ignore[redundant-cast] + except KeyError: + pass + + if remaining := sorted(set(record_ids) - set(by_id)): + # Only retrieve records that aren't already memoized + formula = OR(EQ(RECORD_ID(), record_id) for record_id in sorted(remaining)) + by_id.update( + {obj.id: obj for obj in cls.all(formula=formula, memoize=memoize)} + ) + # Ensure we return records in the same order, and raise KeyError if any are missing - return [records_by_id[record_id] for record_id in record_ids] + return [by_id[record_id] for record_id in record_ids] @classmethod def batch_save(cls, models: List[SelfType]) -> None: """ - Saves a list of model instances to the Airtable API with as few + Save a list of model instances to the Airtable API with as few network requests as possible. Can accept a mixture of new records (which have not been saved yet) and existing records that have IDs. 
""" @@ -398,17 +492,26 @@ def batch_save(cls, models: List[SelfType]) -> None: if (record := model.to_record(only_writable=True)) ] - table = cls.get_table() - table.batch_update(update_records, typecast=cls._typecast()) - created_records = table.batch_create(create_records, typecast=cls._typecast()) - for model, created_record in zip(create_models, created_records): - model.id = created_record["id"] - model.created_time = created_record["createdTime"] + if update_records: + cls.meta.table.batch_update( + update_records, + typecast=cls.meta.typecast, + use_field_ids=cls.meta.use_field_ids, + ) + if create_records: + created_records = cls.meta.table.batch_create( + create_records, + typecast=cls.meta.typecast, + use_field_ids=cls.meta.use_field_ids, + ) + for model, record in zip(create_models, created_records): + model.id = record["id"] + model.created_time = datetime_from_iso_str(record["createdTime"]) @classmethod def batch_delete(cls, models: List[SelfType]) -> None: """ - Deletes a list of model instances from Airtable. + Delete a list of model instances from Airtable. Raises: ValueError: if the model has not been saved to Airtable. @@ -417,18 +520,199 @@ def batch_delete(cls, models: List[SelfType]) -> None: raise ValueError("cannot delete an unsaved model") if not all(isinstance(model, cls) for model in models): raise TypeError(set(type(model) for model in models)) - cls.get_table().batch_delete([model.id for model in models]) + cls.meta.table.batch_delete([model.id for model in models]) def comments(self) -> List[Comment]: """ Return a list of comments on this record. See :meth:`Table.comments `. """ - return self.get_table().comments(self.id) + return self.meta.table.comments(self.id) def add_comment(self, text: str) -> Comment: """ Add a comment to this record. See :meth:`Table.add_comment `. 
""" - return self.get_table().add_comment(self.id, text) + return self.meta.table.add_comment(self.id, text) + + +@dataclass +class _Meta: + """ + Wrapper around a Model.Meta class that provides easier, typed access to + configuration values (which may or may not be defined in the original class). + """ + + model: Type[Model] + + @property + def _config(self) -> Mapping[str, Any]: + if not (meta := getattr(self.model, "Meta", None)): + raise AttributeError(f"{self.model.__name__}.Meta must be defined") + if isinstance(meta, dict): + return meta + try: + return cast(Mapping[str, Any], meta.__dict__) + except AttributeError: + raise TypeError( + f"{self.model.__name__}.Meta must be a dict or class; got {type(meta)}" + ) + + def get( + self, + name: str, + default: Any = None, + required: bool = False, + call: bool = True, + check_types: Optional["_ClassInfo"] = None, + ) -> Any: + """ + Given a name, retrieve the model configuration with that name. + + Args: + default: The default value to use if the name is not defined. + required: If ``True``, raises ``ValueError`` if the name is undefined or None. + call: If ``False``, does not execute any callables to retrieve this value; + it will consider the callable itself as the value. + check_types: If set, will raise a ``TypeError`` if the value is not + an instance of the given type(s). 
+ """ + if required and name not in self._config: + raise ValueError(f"{self.model.__name__}.Meta.{name} must be defined") + value = self._config.get(name, default) + if callable(value) and call: + value = value() + if required and value is None: + raise ValueError(f"{self.model.__name__}.Meta.{name} cannot be None") + if check_types is not None and not isinstance(value, check_types): + raise TypeError(f"expected {check_types!r}; got {type(value)}") + return value + + @property + def api_key(self) -> str: + return str(self.get("api_key", required=True)) + + @property + def timeout(self) -> Optional[TimeoutTuple]: + return self.get( # type: ignore[no-any-return] + "timeout", + default=None, + check_types=(type(None), tuple), + ) + + @property + def retry_strategy(self) -> Optional[Union[bool, retrying.Retry]]: + return self.get( # type: ignore[no-any-return] + "retry", + default=True, + check_types=(type(None), bool, retrying.Retry), + ) + + @cached_property + def api(self) -> Api: + return Api( + self.api_key, + timeout=self.timeout, + retry_strategy=self.retry_strategy, + ) + + @property + def base_id(self) -> str: + return str(self.get("base_id", required=True)) + + @property + def base(self) -> Base: + return self.api.base(self.base_id) + + @property + def table_name(self) -> str: + return str(self.get("table_name", required=True)) + + @property + def table(self) -> Table: + return self.base.table(self.table_name) + + @property + def typecast(self) -> bool: + return bool(self.get("typecast", default=True)) + + @property + def use_field_ids(self) -> bool: + return bool(self.get("use_field_ids", default=False)) + + @property + def memoize(self) -> bool: + return bool(self.get("memoize", default=False)) + + @property + def request_kwargs(self) -> Dict[str, Any]: + return { + "user_locale": None, + "cell_format": "json", + "time_zone": None, + "use_field_ids": self.use_field_ids, + } + + +@dataclass(frozen=True) +class SaveResult: + """ + Represents the result of 
saving a record to the API. The result's + attributes contain more granular information about the save operation: + + >>> result = model.save() + >>> result.record_id + 'recWPqD9izdsNvlE' + >>> result.created + False + >>> result.updated + True + >>> result.forced + False + >>> result.field_names + {'Name', 'Email'} + + If none of the model's fields have changed, calling :meth:`~pyairtable.orm.Model.save` + will not perform any API requests and will return a SaveResult with no changes. + + >>> model = YourModel() + >>> result = model.save() + >>> result.saved + True + >>> second_result = model.save() + >>> second_result.saved + False + + For backwards compatibility, instances of SaveResult will evaluate as truthy + if the record was created, and falsy if the record was not created. + """ + + record_id: RecordId + created: bool = False + updated: bool = False + forced: bool = False + field_names: Set[FieldName] = dataclasses.field(default_factory=set) + + def __bool__(self) -> bool: + """ + Returns ``True`` if the record was created. This is for backwards compatibility + with the behavior of :meth:`~pyairtable.orm.Model.save` prior to the 3.0 release, + which returned a boolean indicating whether a record was created. + """ + warnings.warn( + "Model.save() now returns SaveResult instead of bool; switch" + " to checking Model.save().created instead before the 4.0 release.", + DeprecationWarning, + stacklevel=2, + ) + return self.created + + @property + def saved(self) -> bool: + """ + Whether the record was saved to the API. If ``False``, this indicates there + were no changes to the model and the :meth:`~pyairtable.orm.Model.save` + operation was not forced. + """ + return self.created or self.updated diff --git a/pyairtable/testing.py b/pyairtable/testing.py index 9cf0e04a..4f198e82 100644 --- a/pyairtable/testing.py +++ b/pyairtable/testing.py @@ -1,17 +1,63 @@ """ -Helper functions for writing tests that use the pyairtable library. 
+pyAirtable provides a number of helper functions for testing code that uses +the Airtable API. These functions are designed to be used with the standard +Python :mod:`unittest.mock` library, and can be used to create fake records, +users, and attachments, as well as to mock the Airtable API itself. """ + import datetime +import inspect +import mimetypes import random import string -from typing import Any, Optional +from collections import defaultdict +from contextlib import ExitStack, contextmanager +from functools import partialmethod +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, + overload, +) +from unittest import mock + +import urllib3 +from typing_extensions import Self, TypeAlias + +from pyairtable.api import retrying +from pyairtable.api.api import Api, TimeoutTuple +from pyairtable.api.table import Table +from pyairtable.api.types import ( + AnyRecordDict, + AttachmentDict, + CollaboratorDict, + CreateRecordDict, + FieldName, + Fields, + RecordDeletedDict, + RecordDict, + RecordId, + UpdateRecordDict, + UpsertResultDict, + WritableFields, +) +from pyairtable.utils import fieldgetter, is_airtable_id + -from pyairtable.api.types import AttachmentDict, CollaboratorDict, Fields, RecordDict +def _now() -> str: + return datetime.datetime.now().isoformat() + "Z" def fake_id(type: str = "rec", value: Any = None) -> str: """ - Generates a fake Airtable-style ID. + Generate a fake Airtable-style ID. 
Args: type: the object type prefix, defaults to "rec" @@ -30,14 +76,28 @@ def fake_id(type: str = "rec", value: Any = None) -> str: def fake_meta( - base_id: str = "appFakeTestingApp", - table_name: str = "tblFakeTestingTbl", + base_id: str = "", + table_name: str = "", api_key: str = "patFakePersonalAccessToken", + timeout: Optional[TimeoutTuple] = None, + retry: Optional[Union[bool, retrying.Retry]] = None, + typecast: bool = True, + use_field_ids: bool = False, + memoize: bool = False, ) -> type: """ - Returns a ``Meta`` class for inclusion in a ``Model`` subclass. + Generate a ``Meta`` class for inclusion in a ``Model`` subclass. """ - attrs = {"base_id": base_id, "table_name": table_name, "api_key": api_key} + attrs = { + "base_id": base_id or fake_id("app"), + "table_name": table_name or fake_id("tbl"), + "api_key": api_key, + "timeout": timeout, + "retry": retry, + "typecast": typecast, + "use_field_ids": use_field_ids, + "memoize": memoize, + } return type("Meta", (), attrs) @@ -47,34 +107,510 @@ def fake_record( **other_fields: Any, ) -> RecordDict: """ - Returns a fake record dict with the given field values. + Generate a fake record dict with the given field values. 
    >>> fake_record({"Name": "Alice"})
-    {'id': '...', 'createdTime': '...', 'fields': {'Name': 'Alice'}}
+    {
+        'id': '...',
+        'createdTime': '...',
+        'fields': {'Name': 'Alice'}
+    }
 
-    >>> fake_record(name='Alice', address='123 Fake St')
-    {'id': '...', 'createdTime': '...', 'fields': {'name': 'Alice', 'address': '123 Fake St'}}
+    >>> fake_record(name="Alice", id="123")
+    {
+        'id': 'rec00000000000123',
+        'createdTime': '...',
+        'fields': {'name': 'Alice'}
+    }
 
-    >>> fake_record(name='Alice', id='123')
-    {'id': 'rec00000000000123', 'createdTime': '...', 'fields': {'name': 'Alice'}}
+    >>> fake_record(name="Alice", id="recABC00000000123")
+    {
+        'id': 'recABC00000000123',
+        'createdTime': '...',
+        'fields': {'name': 'Alice'}
+    }
     """
     return {
-        "id": fake_id(value=id),
-        "createdTime": datetime.datetime.now().isoformat() + "Z",
+        "id": str(id) if is_airtable_id(id, "rec") else fake_id(value=id),
+        "createdTime": _now(),
         "fields": {**(fields or {}), **other_fields},
     }
 
 
 def fake_user(value: Any = None) -> CollaboratorDict:
+    """
+    Generate a fake user dict with the given value for an email prefix.
+
+    >>> fake_user("Alice")
+    {
+        'id': 'usr000000000Alice',
+        'email': 'alice@example.com',
+        'name': 'Alice'
+    }
+    """
     id = fake_id("usr", value)
-    return {"id": id, "email": f"{value or id}@example.com", "name": "Fake User"}
+    return {
+        "id": id,
+        "email": f"{str(value or id).lower()}@example.com",
+        "name": str(value or "Fake User"),
+    }
+
+
+def fake_attachment(url: str = "", filename: str = "") -> AttachmentDict:
+    """
+    Generate a fake attachment dict.
-def fake_attachment() -> AttachmentDict:
+    >>> fake_attachment()
+    {
+        'id': 'att...',
+        'url': 'https://example.com/',
+        'filename': 'foo.txt',
+        'size': 100,
+        'type': 'text/plain',
+    }
+
+    >>> fake_attachment('https://example.com/image.png', 'foo.png')
+    {
+        'id': 'att...',
+        'url': 'https://example.com/image.png',
+        'filename': 'foo.png',
+        'size': 100,
+        'type': 'image/png',
+    }
+    """
+    if not filename:
+        filename = (urllib3.util.parse_url(url).path or "").split("/")[-1]
+    filename = filename or "foo.txt"
     return {
         "id": fake_id("att"),
-        "url": "https://example.com/",
-        "filename": "foo.txt",
+        "url": url or "https://example.com/",
+        "filename": filename,
         "size": 100,
-        "type": "text/plain",
+        "type": mimetypes.guess_type(filename)[0] or "text/plain",
     }
+
+
+BaseAndTableId: TypeAlias = Tuple[str, str]
+
+
+class MockAirtable:
+    """
+    This class acts as a context manager which mocks several pyAirtable APIs,
+    so that your tests can operate against tables without making network requests.
+
+    .. code-block:: python
+
+        from pyairtable import Api
+        from pyairtable.testing import MockAirtable
+
+        table = Api("apiKey").base("baseId").table("tableName")
+
+        with MockAirtable() as m:
+            m.add_records(table, [{"Name": "Alice"}])
+            records = table.all()
+            assert len(records) == 1
+
+    If you use pytest, you might want to include this as a fixture.
+
+    .. code-block:: python
+
+        import pytest
+        from pyairtable.testing import MockAirtable
+
+        @pytest.fixture(autouse=True)
+        def mock_airtable():
+            with MockAirtable() as m:
+                yield m
+
+        def test_your_function():
+            ...
+
+    Not all API methods are supported; if your test calls a method that would
+    make a network request, a RuntimeError will be raised instead.
+
+    >>> with MockAirtable() as m:
+    ...     table.schema()
+    ...
+    Traceback (most recent call last): ...
+ RuntimeError: unhandled call to Api.request + + You can allow unhandled requests by setting the ``passthrough`` argument to True, + either on the constructor or temporarily on the MockAirtable instance. This is + useful when using another library, like `requests-mock `_, + to prepare responses for complex cases (like code that retrieves the schema). + + .. code-block:: python + + def test_your_function(requests_mock, mock_airtable, monkeypatch): + base = Api.base("baseId") + + # load and cache our mock schema + requests_mock.get( + base.meta_url("tables"), + json={"tables": [...]} + ) + with mock_airtable.enable_passthrough(): + base.schema() + + # code below will fail if any more unhandled requests are made + ... + + """ + + # The list of APIs that are mocked by this class. + mocked = [ + "Api.request", + "Table.iterate", + "Table.get", + "Table.create", + "Table.update", + "Table.delete", + "Table.batch_create", + "Table.batch_update", + "Table.batch_delete", + "Table.batch_upsert", + ] + + # 2-layer mapping of (base, table) IDs --> record IDs --> record dicts. + records: Dict[BaseAndTableId, Dict[RecordId, RecordDict]] + + _stack: Optional[ExitStack] + _mocks: Dict[str, Any] + + def __init__(self, passthrough: bool = False) -> None: + """ + Args: + passthrough: if True, unmocked methods will still be allowed to + perform real network requests. If False, they will raise an error. 
+ """ + self.passthrough = passthrough + self._reset() + + def _reset(self) -> None: + self._stack = None + self._mocks = {} + self.records = defaultdict(dict) + + def __enter__(self) -> Self: + if self._stack: + raise RuntimeError("MockAirtable is not reentrant") + if hasattr(Api.request, "mock"): + raise RuntimeError("MockAirtable cannot be nested") + self._reset() + self._stack = ExitStack() + + for name in self.mocked: + side_effect_name = name.replace(".", "_").lower() + side_effect = getattr(self, f"_{side_effect_name}", None) + mocked_method = self._mocks[name] = mock.patch( + f"pyairtable.{name}", + side_effect=side_effect, + autospec=True, + ) + self._stack.enter_context(mocked_method) + + return self + + def __exit__(self, *exc_info: Any) -> None: + if self._stack: + self._stack.__exit__(*exc_info) + + @contextmanager + def set_passthrough(self, allowed: bool) -> Iterator[Self]: + """ + Context manager that temporarily changes whether unmocked methods + are allowed to perform real network requests. For convenience, there are + also shortcuts ``enable_passthrough()`` and ``disable_passthrough()``. + + Usage: + + .. code-block:: python + + with MockAirtable() as m: + with m.enable_passthrough(): + schema = base.schema() + hooks = table.webhooks() + + # no more network requests allowed + ... + + Args: + allowed: If ``True``, unmocked methods will be allowed to perform real + network requests within this context manager. If ``False``, + they will not be allowed. + """ + original = self.passthrough + self.passthrough = allowed + try: + yield self + finally: + self.passthrough = original + + enable_passthrough = partialmethod(set_passthrough, True) + disable_passthrough = partialmethod(set_passthrough, False) + + @overload + def add_records( + self, + base_id: str, + table_id_or_name: str, + /, + records: Iterable[Dict[str, Any]], + ) -> List[RecordDict]: ... 
+ + @overload + def add_records( + self, + table: Table, + /, + records: Iterable[Dict[str, Any]], + ) -> List[RecordDict]: ... + + def add_records(self, *args: Any, **kwargs: Any) -> List[RecordDict]: + """ + Add a list of records to the mock Airtable instance. These will be returned + from methods like :meth:`~pyairtable.Table.all` and :meth:`~pyairtable.Table.get`. + + Can be called with either a base ID and table name, + or an instance of :class:`~pyairtable.Table`: + + .. code-block:: + + m = MockAirtable() + m.add_records("baseId", "tableName", [{"Name": "Alice"}]) + m.add_records(table, records=[{"id": "recFake", {"Name": "Alice"}}]) + + .. note:: + + The parameters to :meth:`~pyairtable.Table.all` are not supported by MockAirtable, + and constraints like ``formula=`` and ``limit=`` will be ignored. It is assumed + that you are adding records to specifically test a particular use case. + MockAirtable is not a full in-memory replacement for the Airtable API. + + Args: + base_id: |arg_base_id| + *This must be the first positional argument.* + table_id_or_name: |arg_table_id_or_name| + This should be the same ID or name used in the code under test. + *This must be the second positional argument.* + table: An instance of :class:`~pyairtable.Table`. + *This is an alternative to providing base and table IDs, + and must be the first positional argument.* + records: A sequence of :class:`~pyairtable.api.types.RecordDict`, + :class:`~pyairtable.api.types.UpdateRecordDict`, + :class:`~pyairtable.api.types.CreateRecordDict`, + or :class:`~pyairtable.api.types.Fields`. + """ + base_id, table_name, records = _extract_args(args, kwargs, ["records"]) + coerced = [coerce_fake_record(record) for record in records] + self.records[(base_id, table_name)].update( + {record["id"]: record for record in coerced} + ) + return coerced + + @overload + def set_records( + self, + base_id: str, + table_id_or_name: str, + /, + records: Iterable[Dict[str, Any]], + ) -> None: ... 
+ + @overload + def set_records( + self, + table: Table, + /, + records: Iterable[Dict[str, Any]], + ) -> None: ... + + def set_records(self, *args: Any, **kwargs: Any) -> None: + """ + Set the mock records for a particular base and table, replacing any existing records. + See :meth:`~MockAirtable.add_records` for more information. + + Args: + base_id: |arg_base_id| + *This must be the first positional argument.* + table_id_or_name: |arg_table_id_or_name| + This should be the same ID or name used in the code under test. + *This must be the second positional argument.* + table: An instance of :class:`~pyairtable.Table`. + *This is an alternative to providing base and table IDs, + and must be the first positional argument.* + records: A sequence of :class:`~pyairtable.api.types.RecordDict`, + :class:`~pyairtable.api.types.UpdateRecordDict`, + :class:`~pyairtable.api.types.CreateRecordDict`, + or :class:`~pyairtable.api.types.Fields`. + """ + base_id, table_name, records = _extract_args(args, kwargs, ["records"]) + self.records[(base_id, table_name)].clear() + self.add_records(base_id, table_name, records=records) + + def clear(self) -> None: + """ + Clear all records from the mock Airtable instance. 
+ """ + self.records.clear() + + # side effects + + def _api_request(self, api: Api, method: str, url: str, **kwargs: Any) -> Any: + if not self.passthrough: + raise RuntimeError("unhandled call to Api.request") + mocked = self._mocks["Api.request"] + return mocked.temp_original(api, method, url, **kwargs) + + def _table_iterate(self, table: Table, **options: Any) -> List[List[RecordDict]]: + return [list(self.records[(table.base.id, table.name)].values())] + + def _table_get(self, table: Table, record_id: str, **options: Any) -> RecordDict: + return self.records[(table.base.id, table.name)][record_id] + + def _table_create( + self, + table: Table, + record: CreateRecordDict, + **kwargs: Any, + ) -> RecordDict: + records = self.records[(table.base.id, table.name)] + record = coerce_fake_record(record) + while record["id"] in records: + record["id"] = fake_id() # pragma: no cover + records[record["id"]] = record + return record + + def _table_update( + self, + table: Table, + record_id: RecordId, + fields: WritableFields, + **kwargs: Any, + ) -> RecordDict: + exists = self.records[(table.base.id, table.name)][record_id] + exists["fields"].update(fields) + return exists + + def _table_delete(self, table: Table, record_id: RecordId) -> RecordDeletedDict: + self.records[(table.base.id, table.name)].pop(record_id) + return {"id": record_id, "deleted": True} + + def _table_batch_create( + self, + table: Table, + records: Iterable[CreateRecordDict], + **kwargs: Any, + ) -> List[RecordDict]: + return [self._table_create(table, record) for record in records] + + def _table_batch_update( + self, + table: Table, + records: Iterable[UpdateRecordDict], + **kwargs: Any, + ) -> List[RecordDict]: + return [ + self._table_update(table, record["id"], record["fields"]) + for record in records + ] + + def _table_batch_delete( + self, + table: Table, + record_ids: Iterable[RecordId], + ) -> List[RecordDeletedDict]: + return [self._table_delete(table, record_id) for record_id in 
record_ids] + + def _table_batch_upsert( + self, + table: Table, + records: Iterable[AnyRecordDict], + key_fields: Iterable[FieldName], + **kwargs: Any, + ) -> UpsertResultDict: + """ + Perform a batch upsert operation on the mocked records for the table. + """ + key = fieldgetter(*key_fields) + existing_by_id = self.records[(table.base.id, table.name)] + existing_by_key = {key(r): r for r in existing_by_id.values()} + result: UpsertResultDict = { + "updatedRecords": [], + "createdRecords": [], + "records": [], + } + + for record in records: + existing_record: Optional[RecordDict] + if "id" in record: + record_id = str(record.get("id")) + existing_record = existing_by_id[record_id] + existing_record["fields"].update(record["fields"]) + result["updatedRecords"].append(record_id) + result["records"].append(existing_record) + elif existing_record := existing_by_key.get(key(record)): + existing_record["fields"].update(record["fields"]) + result["updatedRecords"].append(existing_record["id"]) + result["records"].append(existing_record) + else: + created_record = self._table_create(table, record) + result["createdRecords"].append(created_record["id"]) + result["records"].append(created_record) + + return result + + +def coerce_fake_record(record: Union[AnyRecordDict, Fields]) -> RecordDict: + """ + Coerce a record dict or field mapping to the expected format for + an Airtable record, creating a fake ID and createdTime if necessary. 
+ + >>> coerce_fake_record({"Name": "Alice"}) + {'id': 'rec000...', 'createdTime': '...', 'fields': {'Name': 'Alice'}} + """ + if "fields" not in record: + record = {"fields": cast(Fields, record)} + return { + "id": str(record.get("id") or fake_id()), + "createdTime": str(record.get("createdTime") or _now()), + "fields": record["fields"], } + + +def _extract_args( + args: Sequence[Any], + kwargs: Dict[str, Any], + extract: Optional[Sequence[str]] = None, +) -> Tuple[Any, ...]: + """ + Convenience function for functions/methods which accept either + a Table or a (base_id, table_name) as their first posargs. + """ + extract = extract or [] + extracted = set() + caller = inspect.stack()[1].function + + if type(args[0]) is Table: + args = (args[0].base.id, args[0].name, *args[1:]) + + argtypes = tuple(type(arg) for arg in args) + if argtypes[:2] != (str, str): + raise TypeError( + f"{caller} expected (str, str, ...), got ({', '.join(t.__name__ for t in argtypes)})" + ) + + for extract_name in extract: + if extract_name in kwargs: + extracted.add(extract_name) + args = (*args, kwargs.pop(extract_name)) + + if kwargs: + raise TypeError( + f"{caller} got unexpected keyword arguments: {', '.join(kwargs)}" + ) + if len(args) < len(extract) + 2 and len(extracted) < len(extract): + missing = set(extract) - extracted + raise TypeError(f"{caller} missing keyword arguments: {', '.join(missing)}") + + return tuple(args) diff --git a/pyairtable/utils.py b/pyairtable/utils.py index 1ca55bd3..1adf42be 100644 --- a/pyairtable/utils.py +++ b/pyairtable/utils.py @@ -1,20 +1,51 @@ +import inspect +import re +import textwrap +import urllib.parse +import warnings from datetime import date, datetime -from typing import Iterator, Sequence, TypeVar, Union +from functools import partial, wraps +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generic, + Iterable, + Iterator, + List, + Optional, + Sequence, + TypeVar, + Union, + cast, +) -from pyairtable.api.types import 
CreateAttachmentDict +import requests +from typing_extensions import ParamSpec, Protocol, Self +from pyairtable.api.types import AnyRecordDict, CreateAttachmentByUrl, FieldValue + +if TYPE_CHECKING: + from pyairtable.api.api import Api + + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) T = TypeVar("T") +C = TypeVar("C", contravariant=True) +F = TypeVar("F", bound=Callable[..., Any]) def datetime_to_iso_str(value: datetime) -> str: """ - Converts ``datetime`` object into Airtable compatible ISO 8601 string + Convert ``datetime`` object into Airtable compatible ISO 8601 string e.g. "2014-09-05T12:34:56.000Z" Args: value: datetime object """ - return value.isoformat(timespec="milliseconds") + "Z" + return value.isoformat(timespec="milliseconds").replace("+00:00", "Z") def datetime_from_iso_str(value: str) -> datetime: @@ -24,12 +55,14 @@ def datetime_from_iso_str(value: str) -> datetime: Args: value: datetime string, e.g. "2014-09-05T07:00:00.000Z" """ - return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ") + if value.endswith("Z"): + value = value[:-1] + "+00:00" + return datetime.fromisoformat(value) def date_to_iso_str(value: Union[date, datetime]) -> str: """ - Converts a ``date`` or ``datetime`` into an Airtable-compatible ISO 8601 string + Convert a ``date`` or ``datetime`` into an Airtable-compatible ISO 8601 string Args: value: date or datetime object, e.g. "2014-09-05" @@ -39,7 +72,7 @@ def date_to_iso_str(value: Union[date, datetime]) -> str: def date_from_iso_str(value: str) -> date: """ - Converts ISO 8601 date string into a ``date`` object. + Convert ISO 8601 date string into a ``date`` object. Args: value: date string, e.g. 
"2014-09-05" @@ -47,9 +80,9 @@ def date_from_iso_str(value: str) -> date: return datetime.strptime(value, "%Y-%m-%d").date() -def attachment(url: str, filename: str = "") -> CreateAttachmentDict: +def attachment(url: str, filename: str = "") -> CreateAttachmentByUrl: """ - Returns a dictionary using the expected dictionary format for creating attachments. + Build a ``dict`` in the expected format for creating attachments. When creating an attachment, ``url`` is required, and ``filename`` is optional. Airtable will download the file at the given url and keep its own copy of it. @@ -58,12 +91,12 @@ def attachment(url: str, filename: str = "") -> CreateAttachmentDict: Note: Attachment field values **must** be an array of :class:`~pyairtable.api.types.AttachmentDict` or - :class:`~pyairtable.api.types.CreateAttachmentDict`; + :class:`~pyairtable.api.types.CreateAttachmentByUrl`; it is not valid to pass a single item to the API. Usage: >>> table = Table(...) - >>> profile_url = "https://myprofile.com/id/profile.jpg + >>> profile_url = "https://example.com/profile.jpg" >>> rec = table.create({"Profile Photo": [attachment(profile_url)]}) { 'id': 'recZXOZ5gT9vVGHfL', @@ -71,8 +104,8 @@ def attachment(url: str, filename: str = "") -> CreateAttachmentDict: 'attachment': [ { 'id': 'attu6kbaST3wUuNTA', - 'url': 'https://aws1.discourse-cdn.com/airtable/original/2X/4/411e4fac00df06a5e316a0585a831549e11d0705.png', - 'filename': '411e4fac00df06a5e316a0585a831549e11d0705.png' + 'url': 'https://content.airtable.com/...', + 'filename': 'profile.jpg' } ] }, @@ -81,6 +114,11 @@ def attachment(url: str, filename: str = "") -> CreateAttachmentDict: """ + warnings.warn( + "attachment(url, filename) is deprecated; use {'url': url, 'filename': filename} instead.", + DeprecationWarning, + stacklevel=2, + ) return {"url": url} if not filename else {"url": url, "filename": filename} @@ -94,3 +132,416 @@ def chunked(iterable: Sequence[T], chunk_size: int) -> Iterator[Sequence[T]]: """ for i in 
range(0, len(iterable), chunk_size): yield iterable[i : i + chunk_size] + + +def is_airtable_id(value: Any, prefix: str = "") -> bool: + """ + Check whether the given value is an Airtable ID. + + Args: + value: The value to check. + prefix: If provided, the ID must have the given prefix. + """ + if not isinstance(value, str): + return False + if prefix and not value.startswith(prefix): + return False + return len(value) == 17 + + +is_record_id = partial(is_airtable_id, prefix="rec") +is_base_id = partial(is_airtable_id, prefix="app") +is_table_id = partial(is_airtable_id, prefix="tbl") +is_field_id = partial(is_airtable_id, prefix="fld") +is_user_id = partial(is_airtable_id, prefix="usr") + + +def enterprise_only(wrapped: F, /, modify_docstring: bool = True) -> F: + """ + Wrap a function or method so that if Airtable returns a 404, + we will annotate the error with a helpful note to the user. + """ + + if modify_docstring: + _prepend_docstring_text(wrapped, "|enterprise_only|") + + # Allow putting the decorator on a class + if inspect.isclass(wrapped): + for name, obj in vars(wrapped).items(): + if inspect.isfunction(obj): + setattr(wrapped, name, enterprise_only(obj)) + return cast(F, wrapped) # type: ignore[redundant-cast] + + @wraps(wrapped) + def _decorated(*args: Any, **kwargs: Any) -> Any: + try: + return wrapped(*args, **kwargs) + except requests.exceptions.HTTPError as exc: + if exc.response is not None and exc.response.status_code == 404: + exc.args = ( + *exc.args, + f"NOTE: {wrapped.__qualname__}() requires an enterprise billing plan.", + ) + raise exc + + return _decorated # type: ignore[return-value] + + +def _prepend_docstring_text(obj: Any, text: str, *, skip_empty: bool = True) -> None: + doc = obj.__doc__ or "" + if skip_empty and not doc: + return + doc = doc.lstrip("\n") + if has_leading_spaces := re.match(r"^\s+", doc): + text = textwrap.indent(text, has_leading_spaces[0]) + obj.__doc__ = f"{text}\n\n{doc}" + + +def _append_docstring_text( + 
obj: Any, text: str, *, skip_empty: bool = True, before_re: str = "" +) -> None: + doc = obj.__doc__ or "" + if skip_empty and not doc: + return + doc = doc.rstrip("\n") + if has_leading_spaces := re.match(r"^\s+", doc): + text = textwrap.indent(text, has_leading_spaces[0]) + if before_re and (match := re.search(before_re, doc, re.MULTILINE)): + text = text + "\n\n" + doc[match.start() :] + doc = doc[: match.start()].rstrip() + obj.__doc__ = f"{doc}\n\n{text}" + + +def docstring_from(obj: Any, append: str = "") -> Callable[[F], F]: + def _wrapper(func: F) -> F: + func.__doc__ = obj.__doc__ + if append: + _append_docstring_text(func, append) + return func + + return _wrapper + + +class _FetchMethod(Protocol, Generic[C, R]): + def __get__(self, instance: C, owner: Any) -> Callable[..., R]: ... + + def __call__(self_, self: C, *, force: bool = False) -> R: ... + + +def cache_unless_forced(func: Callable[[C], R]) -> _FetchMethod[C, R]: + """ + Wrap a method (e.g. ``Base.shares()``) in a decorator that will save + a memoized version of the return value for future reuse, but will also + allow callers to pass ``force=True`` to recompute the memoized version. + """ + + attr = f"_{func.__name__}" + if attr.startswith("__"): + attr = "_cached_" + attr.lstrip("_") + + @wraps(func) + def _inner(self: C, *, force: bool = False, **kwargs: Any) -> R: + if force or getattr(self, attr, None) is None: + setattr(self, attr, func(self, **kwargs)) + return cast(R, getattr(self, attr)) + + _inner.__annotations__["force"] = bool + _append_docstring_text(_inner, "Args:\n\tforce: |kwarg_force_metadata|") + + return cast(_FetchMethod[C, R], _inner) + + +def coerce_iso_str(value: Any) -> Optional[str]: + """ + Given an input that might be a date or datetime, or an ISO 8601 formatted str, + convert the value into an ISO 8601 formatted str. 
+ """ + if value is None: + return value + if isinstance(value, str): + datetime.fromisoformat(value) # validates type, nothing more + return value + if isinstance(value, (date, datetime)): + return value.isoformat() + raise TypeError(f"cannot coerce {type(value)} into ISO 8601 str") + + +def coerce_list_str(value: Optional[Union[str, Iterable[str]]]) -> List[str]: + """ + Given an input that is either a str or an iterable of str, return a list. + """ + if value is None: + return [] + if isinstance(value, str): + return [value] + return list(value) + + +def fieldgetter( + *fields: str, + required: Union[bool, Iterable[str]] = False, +) -> Callable[[AnyRecordDict], Any]: + """ + Create a function that extracts ID, created time, or field values from a record. + Intended to be used in similar situations as + `operator.itemgetter `_. + + >>> record = {"id": "rec001", "fields": {"Name": "Alice"}} + >>> fieldgetter("Name")(record) + 'Alice' + >>> fieldgetter("id")(record) + 'rec001' + >>> fieldgetter("id", "Name", "Missing")(record) + ('rec001', 'Alice', None) + + Args: + fields: The field names to extract from the record. The values + ``"id"`` and ``"createdTime"`` are special cased; all other + values are interpreted as field names. + required: If True, will raise KeyError if a value is missing. + If False, missing values will return as None. + If a sequence of field names is provided, only those names + will be required. 
+ """ + if isinstance(required, str): + required = {required} + elif required is True: + required = set(fields) + elif required is False: + required = [] + else: + required = set(required) + + def _get_field(record: AnyRecordDict, field: str) -> FieldValue: + src = record if field in ("id", "createdTime") else record["fields"] + if field in required and field not in src: + raise KeyError(field) + return src.get(field) + + if len(fields) == 1: + return partial(_get_field, field=fields[0]) + + def _getter(record: AnyRecordDict) -> Any: + return tuple(_get_field(record, field) for field in fields) + + return _getter + + +class Url(str): + """ + Wrapper for ``str`` that adds Path-like syntax for extending + URL components and adding query params. + + >>> url = Url('http://example.com') + >>> url + Url('http://example.com') + >>> url / 'foo' & {'a': 1, 'b': [2, 3, 4]} + Url('http://example.com/foo?a=1&b=2&b=3&b=4') + >>> url // [1, 2, 3, 4] + Url('http://example.com/1/2/3/4') + """ + + def _parse(self) -> urllib.parse.ParseResult: + """ + Shortcut for `urllib.parse.urlparse `_. + """ + return urllib.parse.urlparse(self) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + def __truediv__(self, other: Any) -> Self: + return self.add_path(other) + + def __floordiv__(self, others: Iterable[Any]) -> Self: + return self.add_path(*others) + + def __and__(self, params: Dict[str, Any]) -> Self: + return self.add_qs(params) + + def add_path(self, *others: Iterable[Any]) -> Self: + """ + Build a copy of this URL with additional path segments. + + >>> url = Url('http://example.com') + >>> url.add_path("a", "b", "c") + Url('http://example.com/a/b/c') + + The shorthand ``/`` has the same effect and can be used with a single path segment. + The shorthand ``//`` can be used with an iterable of path segments. 
+ + >>> url / "a" / "b" / "c" + Url('http://example.com/a/b/c') + >>> url // ["a", "b", "c"] + Url('http://example.com/a/b/c') + """ + if not others: + raise TypeError("add_path() requires at least one argument") + parsed = self._parse() + if parsed.query: + raise ValueError("cannot add path segments after params") + parts = [str(other) for other in others] + if parsed.path: + parts.insert(0, parsed.path.rstrip("/")) + return self.replace_url(path="/".join(parts)) + + def add_qs( + self, + params: Optional[Dict[str, Any]] = None, + **other_params: Any, + ) -> Self: + """ + Build a copy of this URL with additional query parameters. + The shorthand ``&`` has the same effect. + + >>> url = Url('http://example.com') + >>> url.add_qs({"a": 1}, b=[2, 3, 4]) + Url('http://example.com?a=1&b=2&b=3&b=4') + >>> url & {"a": 1, "b": [2, 3, 4]} + Url('http://example.com?a=1&b=2&b=3&b=4') + """ + if not (params or other_params): + raise TypeError("add_qs() requires at least one argument") + params = {} if params is None else params + params.update(other_params) + parsed = self._parse() + qs = urllib.parse.parse_qs(parsed.query) + qs.update(params) + return self.replace_url(query=urllib.parse.urlencode(qs, doseq=True)) + + def replace_url(self, **kwargs: Any) -> Self: + """ + Build a copy of this URL with the given components replaced. + + >>> url = Url('http://example.com') + >>> url.replace(scheme='https', path='/foo') + Url('https://example.com/foo') + """ + return self.__class__(urllib.parse.urlunparse(self._parse()._replace(**kwargs))) + + +class UrlBuilder: + """ + Utility for defining URL patterns within an Airtable API class. + Each instance of UrlBuilder will inspect its own class attributes + and modify them to reflect the actual URL that should be used + based on the context (Table, Base, etc.) provided. + + The pattern for use in pyAirtable is: + + .. 
code-block:: python + + from functools import cached_property + from pyairtable.utils import UrlBuilder + + class SomeObject: + attr1: str + attr2: int + + class _urls(UrlBuilder): + url1 = "/path/to/{attr1}" + url2 = "/path/to/{attr2}" + + urls = cached_property(_urls) + + ...which ensures the URLs are built only once and are accessible via ``.urls``, + and have the ``SomeObject`` instance available as context, and build + readable docstrings for the ``SomeObject`` class documentation. + + .. warning:: + + This class is intended for use within pyAirtable only, and is tailored + to the type of documentation this library produces. Its behavior may + change in the future in ways that are not suitable for other projects. + """ + + context: Any + api: "Api" + + def __init__(self, context: Any = None): + self.context = context + self.api = self._find_api(context) + for attr, value in vars(self.__class__).items(): + if attr.startswith("_") or not isinstance(value, str): + continue + setattr(self, attr, self.build_url(value)) + + def build_url(self, value: str, **kwargs: Any) -> Url: + if "{" in value: + context = {**vars(self.context), **kwargs, "self": self.context} + value = value.format_map(context) + return self.api.build_url(value) + + def __init_subclass__(cls, **kwargs: Any) -> None: + # This is a documentation hack for pyAirtable use cases only, where we + # subclass UrlBuilder within the definition of the class that uses it. + # + # We dynamically add a docstring to each subclass explaining its use, + # and we rely on Sphinx to document the cached_property, not the class. + # + # Will be skipped if the subclass is passed skip_docstring=True. + if "." 
not in cls.__qualname__ or kwargs.pop("skip_docstring", False): + return super().__init_subclass__(**kwargs) + try: + sample_url = next(k for (k, v) in vars(cls).items() if isinstance(v, Url)) + except StopIteration: + # if no URLs defined, don't do anything + return super().__init_subclass__(**kwargs) + + parent_clsname = cls.__qualname__.rsplit(".", 1)[0] + parent_modname = cls.__module__ + parent_varname = parent_clsname.lower().replace(".", "_") + docstring = ( + f"URLs associated with :class:`~{parent_modname}.{parent_clsname}`" + " can be accessed via ``.urls`` using the following syntax:\n\n" + ".. code-block:: python\n\n" + f""" + >>> {parent_varname} = {parent_clsname}(...) + >>> {parent_varname}.urls.{sample_url} + Url('https://api.airtable.com/...') + """ + "\n\nThese properties are all instances of :class:`~pyairtable.utils.Url`." + ) + + for name, obj in vars(cls).items(): + qualname = f"{parent_modname}::{cls.__qualname__}.{name}" + if isinstance(obj, Url): + docstring += f"\n\n.. autoattribute:: {qualname}\n :noindex:" + elif callable(obj): + docstring += f"\n\n.. 
automethod:: {qualname}\n :noindex:" + + _append_docstring_text(cls, docstring, skip_empty=False) + + @classmethod + def _find_api(self, context: Any) -> "Api": + from pyairtable.api.api import Api # avoid circular import + + if isinstance(context, Api): + return context + if isinstance(api := getattr(context, "api", None), Api): + return api + raise TypeError("context must be an instance of Api or have an 'api' attribute") + + +__all__ = [ + "attachment", + "cache_unless_forced", + "chunked", + "coerce_iso_str", + "coerce_list_str", + "date_from_iso_str", + "date_to_iso_str", + "datetime_from_iso_str", + "datetime_to_iso_str", + "docstring_from", + "enterprise_only", + "fieldgetter", + "is_airtable_id", + "is_base_id", + "is_field_id", + "is_record_id", + "is_table_id", + "is_user_id", + "Url", + "UrlBuilder", +] diff --git a/pyproject.toml b/pyproject.toml index 97b1b0d9..08e3c045 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,5 +5,5 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 88 -target-version = ['py38'] +target-version = ['py310'] include = '\.pyi?$' diff --git a/requirements-dev.txt b/requirements-dev.txt index bc59bf2a..777a8e71 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,12 +3,17 @@ cogapp Sphinx==4.5.0 sphinx-autoapi sphinxext-opengraph -revitron-sphinx-theme @ git+https://github.com/gtalarico/revitron-sphinx-theme.git@40f4b09fa5c199e3844153ef973a1155a56981dd +revitron-sphinx-theme @ git+https://github.com/mesozoic/revitron-sphinx-theme.git@7ee572e9e4255c9aaa6b383656ff807fdac1011b sphinx-autodoc-typehints -autodoc-pydantic<2 +autodoc-pydantic>=2 +sphinxcontrib-applehelp==1.0.4 +sphinxcontrib-devhelp==1.0.2 +sphinxcontrib-htmlhelp==2.0.1 +sphinxcontrib-qthelp==1.0.3 +sphinxcontrib-serializinghtml==1.1.5 # Packaging -wheel==0.38.1 +wheel==0.46.2 twine==3.3.0 build==0.6.0.post1 diff --git a/scripts/bump.sh b/scripts/bump.sh deleted file mode 100755 index 62c6c724..00000000 --- a/scripts/bump.sh +++ 
/dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -source ./scripts/console.sh - - -function bump { - previousVersion=$( grep '^__version__' pyairtable/__init__.py | sed 's/__version__ = \"\(.*\)\"/\1/' ) - previousVersion=$(echo -n "${previousVersion}") - info "Enter Version [current is ${previousVersion}]:" - read version - if [ -z "$version" ]; then - info "Empty version string - using existing" - version="$previousVersion" - return - fi - sed -i "" "s/^__version__ = .*$/__version__ = \"$version\"/" pyairtable/__init__.py - echo "Bumped __version__ to $version" -} - -function confirmEval { - info "CMD > $1" - info "ENTER to confirm" - read foo - eval $1 -} - -function push { - cmd="git commit -am \"Publish Version: $version\"" - confirmEval "$cmd" - - cmd="git tag -m \"Version $version\" $version" - confirmEval "$cmd" - - cmd="git push --tags origin main" - confirmEval "$cmd" -} - -bump -push diff --git a/scripts/clean.sh b/scripts/clean.sh index 163f5a9b..6d27e7d9 100755 --- a/scripts/clean.sh +++ b/scripts/clean.sh @@ -1,14 +1,14 @@ #!/bin/bash -source ./scripts/console.sh - -info "Cleanning up files 🧹" +echo "Cleaning up bytecode, cache, and build files 🧹" +set -x python3 -c "import pathlib; [p.unlink() for p in pathlib.Path('.').rglob('*.py[co]')]" python3 -c "import pathlib; [p.rmdir() for p in pathlib.Path('.').rglob('pytest_cache')]" -rm -rdf ./docs/build -rm -rdf ./dist rm -rdf ./build +rm -rdf ./dist +rm -rdf ./docs/build rm -rdf ./htmlcov -rm -rdf pyairtable.egg-info -rm -rdf .pytest_cache +rm -rdf .mypy_cache +rm -rdf .pytest_cache +rm -rdf pyairtable.egg-info diff --git a/scripts/console.sh b/scripts/console.sh deleted file mode 100755 index debad8f1..00000000 --- a/scripts/console.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -RED='\033[1;31m' -L_GREEN='\033[1;32m' -L_BLUE='\033[1;34m' -L_GREY='\033[0;37m' -WHITE='\033[1;37m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -console() { - local color=$1 - local msg=$2 - printf "${!color}${msg}${NC}\n" -} - 
-error() { - local msg=$1 - console 'RED' "==> $msg" -} - -info() { - local msg=$1 - console 'L_GREEN' "==> ${msg}" -} -warn() { - local msg=$1 - console 'L_BLUE' "==> ${msg}" -} diff --git a/scripts/find_model_changes.py b/scripts/find_model_changes.py new file mode 100644 index 00000000..e33e1d1f --- /dev/null +++ b/scripts/find_model_changes.py @@ -0,0 +1,354 @@ +""" +Scans the API documentation on airtable.com and compares it to the models in pyAirtable. +Attempts to flag any places where the library is missing fields or has extra undocumented fields. +""" + +import importlib +import json +import re +from functools import cached_property +from operator import attrgetter +from typing import Any, Dict, Iterator, List, Optional, Type + +import click +import requests + +from pyairtable.models._base import AirtableModel + +API_PREFIX = "https://airtable.com/developers/web/api" +API_INTRO = f"{API_PREFIX}/introduction" +INITDATA_RE = r"]*>\s*window\.initData = (\{.*\})\s*" + +SCAN_MODELS = { + "pyairtable.api.enterprise:UserRemoved": "remove-user-from-enterprise.response", + "pyairtable.api.enterprise:UserRemoved.Shared": "remove-user-from-enterprise.response/@shared", + "pyairtable.api.enterprise:UserRemoved.Shared.Workspace": "remove-user-from-enterprise.response/@shared/@workspaces/items", + "pyairtable.api.enterprise:UserRemoved.Unshared": "remove-user-from-enterprise.response/@unshared", + "pyairtable.api.enterprise:UserRemoved.Unshared.Base": "remove-user-from-enterprise.response/@unshared/@bases/items", + "pyairtable.api.enterprise:UserRemoved.Unshared.Interface": "remove-user-from-enterprise.response/@unshared/@interfaces/items", + "pyairtable.api.enterprise:UserRemoved.Unshared.Workspace": "remove-user-from-enterprise.response/@unshared/@workspaces/items", + "pyairtable.api.enterprise:DeleteUsersResponse": "delete-users-by-email.response", + "pyairtable.api.enterprise:DeleteUsersResponse.UserInfo": "delete-users-by-email.response/@deletedUsers/items", + 
"pyairtable.api.enterprise:DeleteUsersResponse.Error": "delete-users-by-email.response/@errors/items", + "pyairtable.api.enterprise:ManageUsersResponse": "manage-user-membership.response", + "pyairtable.api.enterprise:ManageUsersResponse.Error": "manage-user-membership.response/@errors/items", + "pyairtable.api.enterprise:MoveError": "move-workspaces.response/@errors/items", + "pyairtable.api.enterprise:MoveGroupsResponse": "move-user-groups.response", + "pyairtable.api.enterprise:MoveWorkspacesResponse": "move-workspaces.response", + "pyairtable.models.audit:AuditLogResponse": "audit-log-events.response", + "pyairtable.models.audit:AuditLogEvent": "audit-log-events.response/@events/items", + "pyairtable.models.audit:AuditLogEvent.Context": "audit-log-events.response/@events/items/@context", + "pyairtable.models.audit:AuditLogEvent.Origin": "audit-log-events.response/@events/items/@origin", + "pyairtable.models.audit:AuditLogActor": "schemas/audit-log-actor", + "pyairtable.models.audit:AuditLogActor.UserInfo": "schemas/audit-log-actor/@user", + "pyairtable.models.collaborator:Collaborator": "list-comments.response/@comments/items/@author", + "pyairtable.models.comment:Comment": "list-comments.response/@comments/items", + "pyairtable.models.comment:Reaction": "list-comments.response/@comments/items/@reactions/items", + "pyairtable.models.comment:Reaction.EmojiInfo": "list-comments.response/@comments/items/@reactions/items/@emoji", + "pyairtable.models.comment:Reaction.ReactingUser": "list-comments.response/@comments/items/@reactions/items/@reactingUser", + "pyairtable.models.comment:Mentioned": "schemas/user-mentioned", + "pyairtable.models.schema:BaseSchema": "get-base-schema.response", + "pyairtable.models.schema:TableSchema": "schemas/table-model", + "pyairtable.models.schema:Bases": "list-bases.response", + "pyairtable.models.schema:Bases.Info": "list-bases.response/@bases/items", + "pyairtable.models.schema:BaseCollaborators": "get-base-collaborators.response", 
+ "pyairtable.models.schema:BaseCollaborators.IndividualCollaborators": "get-base-collaborators.response/@individualCollaborators", + "pyairtable.models.schema:BaseCollaborators.GroupCollaborators": "get-base-collaborators.response/@groupCollaborators", + "pyairtable.models.schema:BaseCollaborators.InterfaceCollaborators": "get-base-collaborators.response/@interfaces/*", + "pyairtable.models.schema:BaseCollaborators.InviteLinks": "get-base-collaborators.response/@inviteLinks", + "pyairtable.models.schema:BaseCollaborators.SensitivityLabel": "get-base-collaborators.response/@sensitivityLabel", + "pyairtable.models.schema:BaseShares": "list-shares.response", + "pyairtable.models.schema:BaseShares.Info": "list-shares.response/@shares/items", + "pyairtable.models.schema:ViewSchema": "get-view-metadata.response", + "pyairtable.models.schema:InviteLink": "schemas/invite-link", + "pyairtable.models.schema:WorkspaceInviteLink": "schemas/invite-link", + "pyairtable.models.schema:InterfaceInviteLink": "schemas/invite-link", + "pyairtable.models.schema:EnterpriseInfo": "get-enterprise.response", + "pyairtable.models.schema:EnterpriseInfo.EmailDomain": "get-enterprise.response/@emailDomains/items", + "pyairtable.models.schema:EnterpriseInfo.AggregatedIds": "get-enterprise.response/@aggregated", + "pyairtable.models.schema:WorkspaceCollaborators": "get-workspace-collaborators.response", + "pyairtable.models.schema:WorkspaceCollaborators.Restrictions": "get-workspace-collaborators.response/@workspaceRestrictions", + "pyairtable.models.schema:WorkspaceCollaborators.GroupCollaborators": "get-workspace-collaborators.response/@groupCollaborators", + "pyairtable.models.schema:WorkspaceCollaborators.IndividualCollaborators": "get-workspace-collaborators.response/@individualCollaborators", + "pyairtable.models.schema:WorkspaceCollaborators.InviteLinks": "get-workspace-collaborators.response/@inviteLinks", + "pyairtable.models.schema:GroupCollaborator": "schemas/group-collaborator", + 
"pyairtable.models.schema:IndividualCollaborator": "schemas/individual-collaborator", + "pyairtable.models.schema:BaseGroupCollaborator": "schemas/base-group-collaborator", + "pyairtable.models.schema:BaseIndividualCollaborator": "schemas/base-individual-collaborator", + "pyairtable.models.schema:BaseInviteLink": "schemas/base-invite-link", + "pyairtable.models.schema:Collaborations": "schemas/collaborations", + "pyairtable.models.schema:Collaborations.BaseCollaboration": "schemas/collaborations/@baseCollaborations/items", + "pyairtable.models.schema:Collaborations.InterfaceCollaboration": "schemas/collaborations/@interfaceCollaborations/items", + "pyairtable.models.schema:Collaborations.WorkspaceCollaboration": "schemas/collaborations/@workspaceCollaborations/items", + "pyairtable.models.schema:UserInfo": "get-user-by-id.response", + "pyairtable.models.schema:UserInfo.AggregatedIds": "get-user-by-id.response/@aggregated", + "pyairtable.models.schema:UserInfo.DescendantIds": "get-user-by-id.response/@descendants/*", + "pyairtable.models.schema:UserGroup": "get-user-group.response", + "pyairtable.models.schema:UserGroup.Member": "get-user-group.response/@members/items", + "pyairtable.models.webhook:Webhook": "list-webhooks.response/@webhooks/items", + "pyairtable.models.webhook:WebhookNotificationResult": "schemas/webhooks-notification", + "pyairtable.models.webhook:WebhookError": "schemas/webhooks-notification/@error", + "pyairtable.models.webhook:WebhookPayloads": "list-webhook-payloads.response", + "pyairtable.models.webhook:WebhookPayload": "schemas/webhooks-payload", + "pyairtable.models.webhook:WebhookPayload.ActionMetadata": "schemas/webhooks-action", + "pyairtable.models.webhook:WebhookPayload.FieldChanged": "schemas/webhooks-table-changed/@changedFieldsById/*", + "pyairtable.models.webhook:WebhookPayload.FieldInfo": "schemas/webhooks-table-changed/@changedFieldsById/*/@current", + "pyairtable.models.webhook:WebhookPayload.RecordChanged": 
"schemas/webhooks-changed-record/*", + "pyairtable.models.webhook:WebhookPayload.RecordCreated": "schemas/webhooks-created-record/*", + "pyairtable.models.webhook:WebhookPayload.TableChanged": "schemas/webhooks-table-changed", + "pyairtable.models.webhook:WebhookPayload.TableChanged.ChangedMetadata": "schemas/webhooks-table-changed/@changedMetadata", + "pyairtable.models.webhook:WebhookPayload.TableInfo": "schemas/webhooks-table-changed/@changedMetadata/@current", + "pyairtable.models.webhook:WebhookPayload.TableCreated": "schemas/webhooks-table-created", + "pyairtable.models.webhook:WebhookPayload.ViewChanged": "schemas/webhooks-table-changed/@changedViewsById/*", + "pyairtable.models.webhook:CreateWebhook": "create-a-webhook.request", + "pyairtable.models.webhook:CreateWebhookResponse": "create-a-webhook.response", + "pyairtable.models.webhook:WebhookSpecification": "create-a-webhook.request/@specification", + "pyairtable.models.webhook:WebhookSpecification.Options": "schemas/webhooks-specification", + "pyairtable.models.webhook:WebhookSpecification.Includes": "schemas/webhooks-specification/@includes", + "pyairtable.models.webhook:WebhookSpecification.Filters": "schemas/webhooks-specification/@filters", + "pyairtable.models.webhook:WebhookSpecification.SourceOptions": "schemas/webhooks-specification/@filters/@sourceOptions", + "pyairtable.models.webhook:WebhookSpecification.SourceOptions.FormSubmission": "schemas/webhooks-specification/@filters/@sourceOptions/@formSubmission", + "pyairtable.models.webhook:WebhookSpecification.SourceOptions.FormPageSubmission": "schemas/webhooks-specification/@filters/@sourceOptions/@formPageSubmission", + "pyairtable.models.schema:TableSchema.DateDependency": "schemas/date-dependency-settings", +} + +IGNORED = [ + "pyairtable.models.audit.AuditLogResponse.Pagination", # pagination, not exposed + "pyairtable.models.schema.NestedId", # internal + "pyairtable.models.schema.NestedFieldId", # internal + 
"pyairtable.models.schema.Bases.offset", # pagination, not exposed + "pyairtable.models.schema.BaseCollaborators.collaborators", # deprecated + "pyairtable.models.schema.WorkspaceCollaborators.collaborators", # deprecated + "pyairtable.models.webhook.WebhookPayload.cursor", # pyAirtable provides this + "pyairtable.models.schema.BaseShares.Info.shareTokenPrefix", # deprecated + "pyairtable.models.webhook.WebhookPayload.CellValuesByFieldId", # undefined in schema + "pyairtable.models.webhook.WebhookNotification", # undefined in schema +] + + +@click.command() +@click.option( + "--save", + "save_apidata", + help="Save API schema information to a file.", + type=click.Path(writable=True), +) +def main(save_apidata: Optional[str]) -> None: + api_data = get_api_data() + if save_apidata: + with open(save_apidata, "w") as f: + json.dump(api_data, f, indent=2, sort_keys=True) + + identify_missing_fields(api_data) + identify_unscanned_classes(api_data) + + +def identify_missing_fields(api_data: "ApiData") -> None: + issues: List[str] = [] + + # Find missing/extra fields + for model_path, data_path in SCAN_MODELS.items(): + modname, clsname = model_path.split(":", 1) + model_module = importlib.import_module(modname) + model_cls = attrgetter(clsname)(model_module) + # Use obj/@thing as shorthand for obj/properties/thing + data_path = data_path.replace("/@", "/properties/") + # Use obj/* as shorthand for obj/additionalProperties + data_path = re.sub(r"/\*(/|$)", r"/additionalProperties\1", data_path) + # Use list-bases.request as shorthand for operations/list-bases/request/schema + # and list-bases.response as shorthand for operations/list-bases/response/schema + data_path = re.sub( + r"(^|/)([a-zA-Z_-]+)\.(request|response)(/|$)", + r"\1operations/\2/\3/schema\4", + data_path, + ) + issues.extend(scan_schema(model_cls, api_data.get_nested(data_path))) + + if not issues: + print("No missing/extra fields found in scanned classes") + else: + for issue in issues: + print(issue) + + 
+def identify_unscanned_classes(api_data: "ApiData") -> None: + issues: List[str] = [] + + # Find unscanned model classes + modules = sorted({model_path.split(":")[0] for model_path in SCAN_MODELS}) + for modname in modules: + if not ignore_name(modname): + mod = importlib.import_module(modname) + issues.extend(scan_missing(mod, prefix=(modname + ":"))) + + if not issues: + print("No unscanned classes found in scanned modules") + else: + for issue in issues: + print(issue) + + +def ignore_name(name: str) -> bool: + if "." in name and any(ignore_name(n) for n in name.split(".")): + return True + return ( + name in IGNORED + or name.startswith("_") + or name.endswith("FieldConfig") + or name.endswith("FieldOptions") + or name.endswith("FieldSchema") + ) + + +class ApiData(Dict[str, Any]): + """ + Wrapper around ``dict`` that adds convenient behavior for reading the API definition. + """ + + def __getitem__(self, key: str) -> Any: + # handy shortcuts + if key == "operations": + return self.by_operation + if key == "schemas": + return self.by_model_name + return super().__getitem__(key) + + def get_nested(self, path: str, separator: str = "/") -> Any: + """ + Retrieves nested objects with a path-like syntax. + """ + get_from = self + traversed = [] + try: + while separator in path: + next_key, path = path.split(separator, 1) + traversed.append(next_key) + get_from = get_from[next_key] + traversed.append(path) + return get_from[path] + except KeyError as exc: + exc.args = tuple(traversed) + raise exc + + @cached_property + def by_operation(self) -> Dict[str, Dict[str, Any]]: + """ + Simplifies traversal of request/response information for defined web API operations, + grouping them by the operation name instead of path/method. 
+ """ + result: Dict[str, Dict[str, Any]] = {} + paths: Dict[str, Dict[str, Any]] = self["openApi"]["paths"] + methodinfo_dicts = [ + methodinfo + for pathinfo in paths.values() + for methodinfo in pathinfo.values() + if isinstance(methodinfo, dict) + ] + for methodinfo in methodinfo_dicts: + methodname = str(methodinfo["operationId"]).lower() + r = result[methodname] = {} + try: + r["response"] = methodinfo["responses"]["200"]["content"]["application/json"] # fmt: skip + except KeyError: + pass + try: + r["request"] = methodinfo["requestBody"]["content"]["application/json"] # fmt: skip + except KeyError: + pass + + return result + + @cached_property + def by_model_name(self) -> Dict[str, Dict[str, Any]]: + """ + Simplifies traversal of schema information by preemptively collapsing + anyOf models + """ + return { + key: self.collapse_schema(self.get_model(name)) + for name in self["openApi"]["components"]["schemas"] + for key in (str(name), str(name).lower()) + } + + def get_model(self, name: str) -> Dict[str, Any]: + """ + Retrieve a model schema by name. + """ + return self.collapse_schema( + self.get_nested(f"openApi/components/schemas/{name}") + ) + + def collapse_schema(self, schema: Dict[str, Any]) -> Dict[str, Any]: + """ + Merge together properties of all entries in anyOf or allOf schemas. + This is acceptable for our use case, but a bad idea in most other cases. 
+ """ + if set(schema) == {"$ref"}: + if (ref := schema["$ref"]).startswith("#/components/schemas/"): + return self.collapse_schema(self.get_model(ref.split("/")[-1])) + raise ValueError(f"unhandled $ref: {ref}") + + for key in ("anyOf", "allOf"): + if key not in schema: + continue + collected_properties = {} + subschema: Dict[str, Any] + for subschema in list(schema[key]): + if subschema.get("type") == "object" or "$ref" in subschema: + collected_properties.update( + self.collapse_schema(subschema).get("properties", {}) + ) + return {"properties": collected_properties} + + return schema + + +def get_api_data() -> ApiData: + """ + Retrieve API information. + """ + response = requests.get(API_INTRO) + response.raise_for_status() + match = re.search(INITDATA_RE, response.text) + if not match: + raise RuntimeError(f"could not find {INITDATA_RE!r} in {API_INTRO}") + return ApiData(json.loads(match.group(1))) + + +def scan_schema(cls: Type[AirtableModel], schema: Dict[str, Any]) -> Iterator[str]: + """ + Yield error messages for missing or undocumented fields. + """ + + name = f"{cls.__module__}.{cls.__qualname__}" + model_aliases = {f.alias for f in cls.model_fields.values() if f.alias} + api_properties = set(schema["properties"]) + missing_keys = api_properties - model_aliases + extra_keys = model_aliases - api_properties + for missing_key in missing_keys: + if not ignore_name(f"{name}.{missing_key}"): + yield f"{name} is missing field: {missing_key}" + for extra_key in extra_keys: + if not ignore_name(f"{name}.{extra_key}"): + yield (f"{name} has undocumented field: {extra_key}") + + +def scan_missing(container: Any, prefix: str) -> Iterator[str]: + """ + Yield error messages for models within the given container which were not scanned. 
+ """ + for name, obj in vars(container).items(): + if not isinstance(obj, type) or not issubclass(obj, AirtableModel): + continue + # ignore imported models in other modules + if not prefix.startswith(obj.__module__): + continue + if ignore_name(f"{obj.__module__}.{obj.__qualname__}"): + continue + if (subpath := f"{prefix}{name}") not in SCAN_MODELS: + yield f"{subpath} was not scanned" + yield from scan_missing(obj, prefix=(subpath + ".")) + + +if __name__ == "__main__": + main() diff --git a/scripts/format.sh b/scripts/format.sh deleted file mode 100755 index 4ad422c7..00000000 --- a/scripts/format.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -set -e - -source ./scripts/console.sh - -info 'Formatting' - -flake8 . -black --diff . diff --git a/scripts/githooks/pre-commit b/scripts/githooks/pre-commit deleted file mode 100755 index d0c4ea41..00000000 --- a/scripts/githooks/pre-commit +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -e - -# This script is deprecated and just replaces itself with pre-commit. -echo "$0: deprecated githooks script; replacing with pre-commit" -cd $(dirname $0)/../.. -git config --local core.hooksPath && git config --local --unset core.hooksPath -make setup -.git/hooks/pre-commit "$@" diff --git a/scripts/githooks/pre-push b/scripts/githooks/pre-push deleted file mode 100755 index 5a4bc047..00000000 --- a/scripts/githooks/pre-push +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -e - -# This script is deprecated and just replaces itself with pre-commit. -echo "$0: deprecated githooks script; replacing with pre-commit" -cd $(dirname $0)/../.. -git config --local core.hooksPath && git config --local --unset core.hooksPath -make setup diff --git a/scripts/release.sh b/scripts/release.sh new file mode 100755 index 00000000..dec39132 --- /dev/null +++ b/scripts/release.sh @@ -0,0 +1,41 @@ +#!/bin/zsh + +function fail { + echo "$@" >&2 + exit 1 +} + +function confirm_eval { + command=($@) + echo "% ${(q)command[@]}" + read -k "confirm?Run? 
[y/n] "; echo + [[ ! "$confirm" =~ [yY] ]] && fail "Cancelled." + eval "${(q)command[@]}" +} + +function bump { + current_version=$(python3 -c 'from pyairtable import __version__; print(__version__)') + read "release_version?Release version [$current_version]: " + if [[ -z "$release_version" ]]; then + release_version=$current_version + elif [[ "$release_version" != "$current_version" ]]; then + sed -i "" "s/^__version__ = .*$/__version__ = \"$release_version\"/" pyairtable/__init__.py + git add pyairtable/__init__.py + PAGER=cat git status + PAGER=cat git diff --cached pyairtable/__init__.py + confirm_eval git commit -m "Release $release_version" pyairtable/__init__.py + fi +} + +function push { + endpoint=gtalarico/pyairtable + origin=$(git remote -v | grep $endpoint | grep '\(push\)' | awk '{print $1}') + if [[ -z "$origin" ]]; then + fail "no remote matching $endpoint" + fi + confirm_eval git tag -s -m "Release $release_version" $release_version + confirm_eval git push $origin $release_version +} + +bump +push diff --git a/setup.cfg b/setup.cfg index fd968f11..e5328968 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,10 +18,11 @@ classifiers = Intended Audience :: Developers License :: OSI Approved :: MIT License Programming Language :: Python - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 + Programming Language :: Python :: 3.14 Programming Language :: Python :: Implementation :: CPython Topic :: Software Development @@ -30,10 +31,18 @@ classifiers = packages = find: install_requires = inflection - pydantic + pydantic >= 2, < 3 requests >= 2.22.0 typing_extensions urllib3 >= 1.26 +[options.extras_require] +cli = + click + +[options.entry_points] +console_scripts = + pyairtable = pyairtable.cli:cli + [aliases] test=pytest diff --git a/tests/conftest.py b/tests/conftest.py 
index 5bf913c3..b27bfe75 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,15 +1,21 @@ +import importlib import json +import re from collections import OrderedDict from pathlib import Path from posixpath import join as urljoin -from typing import Callable +from typing import Any, Callable from urllib.parse import quote, urlencode import pytest from mock import Mock from requests import HTTPError +from requests_mock import Mocker -from pyairtable.api import Api, Base, Table +from pyairtable import Api, Base, Table, Workspace +from pyairtable.api.enterprise import Enterprise +from pyairtable.models.schema import TableSchema +from pyairtable.testing import fake_id @pytest.fixture @@ -30,7 +36,9 @@ def _url_builder(base_id, table_name, params=None): @pytest.fixture def constants(): return dict( - API_KEY="FakeApiKey", BASE_ID="appJMY16gZDQrMWpA", TABLE_NAME="Table Name" + API_KEY="FakeApiKey", + BASE_ID="appLkNDICXNqxSDhG", + TABLE_NAME="Table Name", ) @@ -39,9 +47,14 @@ def api(constants) -> Api: return Api(constants["API_KEY"]) +@pytest.fixture +def base_id(constants) -> str: + return constants["BASE_ID"] + + @pytest.fixture() -def base(api: Api, constants) -> Base: - return api.base(constants["BASE_ID"]) +def base(api: Api, base_id) -> Base: + return api.base(base_id) @pytest.fixture() @@ -49,6 +62,31 @@ def table(base: Base, constants) -> Table: return base.table(constants["TABLE_NAME"]) +@pytest.fixture() +def table_schema(sample_json, api, base) -> TableSchema: + return TableSchema.model_validate(sample_json("TableSchema")) + + +@pytest.fixture +def mock_table_schema(table, requests_mock, sample_json) -> Mocker: + table_schema = sample_json("TableSchema") + table_schema["id"] = table.name = fake_id("tbl") + return requests_mock.get( + table.base.urls.tables + "?include=visibleFieldIds", + json={"tables": [table_schema]}, + ) + + +@pytest.fixture +def workspace_id() -> str: + return "wspmhESAta6clCCwF" # see WorkspaceCollaborators.json + + 
+@pytest.fixture +def workspace(api: Api, workspace_id) -> Workspace: + return api.workspace(workspace_id) + + @pytest.fixture def mock_records(): return [ @@ -134,3 +172,57 @@ def _get_sample_json(name): return json.load(fp) return _get_sample_json + + +@pytest.fixture +def schema_obj(api, sample_json): + """ + Test fixture that provides a callable function which retrieves + an object generated from tests/sample_data, and optionally + retrieves an attribute of that object. + """ + + def _get_schema_obj(name: str, *, context: Any = None) -> Any: + if name.startswith("pyairtable."): + # pyairtable.models.Webhook.created_time -> ('pyairtable.models', 'Webhook.created_time') + match = re.match(r"(pyairtable\.[a-z_.]+)\.([A-Z].+)$", name) + modpath, name = match.groups() + else: + modpath = "pyairtable.models.schema" + + obj_name, _, obj_path = name.partition(".") + obj_data = sample_json(obj_name) + obj_cls = getattr(importlib.import_module(modpath), obj_name) + + if context: + obj = obj_cls.from_api(obj_data, api, context=context) + else: + obj = obj_cls.model_validate(obj_data) + + if obj_path: + obj = eval(f"obj.{obj_path}", None, {"obj": obj}) + return obj + + return _get_schema_obj + + +@pytest.fixture +def mock_base_metadata(base, sample_json, requests_mock): + base_json = sample_json("BaseCollaborators") + requests_mock.get(base.api.urls.bases, json=sample_json("Bases")) + requests_mock.get(base.urls.meta, json=base_json) + requests_mock.get(base.urls.tables, json=sample_json("BaseSchema")) + requests_mock.get(base.urls.shares, json=sample_json("BaseShares")) + for pbd_id, pbd_json in base_json["interfaces"].items(): + requests_mock.get(base.urls.interface(pbd_id), json=pbd_json) + + +@pytest.fixture +def mock_workspace_metadata(workspace, sample_json, requests_mock): + workspace_json = sample_json("WorkspaceCollaborators") + requests_mock.get(workspace.urls.meta, json=workspace_json) + + +@pytest.fixture +def enterprise(api): + return Enterprise(api, 
"entUBq2RGdihxl3vU") diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 7541e332..68db160d 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -10,19 +10,21 @@ def valid_img_url(): return "https://github.com/gtalarico/pyairtable/raw/9f243cb0935ad7112859f990434612efdaf49c67/docs/source/_static/logo.png" +class Columns: + # Table should have these Columns + TEXT = "text" # Text + TEXT_ID = "fldzbVdWW4xJdZ1em" # for returnFieldsByFieldId + NUM = "number" # Number, float + NUM_ID = "fldFLyuxGuWobyMV2" # for returnFieldsByFieldId + BOOL = "boolean" # Boolean + DATETIME = "datetime" # Datetime + ATTACHMENT = "attachment" # attachment + ATTACHMENT_ID = "fld5VP9oPeCpvIumr" # for upload_attachment + + @pytest.fixture -def cols(): - class Columns: - # Table should have these Columns - TEXT = "text" # Text - TEXT_ID = "fldzbVdWW4xJdZ1em" # for returnFieldsByFieldId - NUM = "number" # Number, float - NUM_ID = "fldFLyuxGuWobyMV2" # for returnFieldsByFieldId - BOOL = "boolean" # Boolean - DATETIME = "datetime" # Datetime - ATTACHMENT = "attachment" # attachment - - return Columns +def cols() -> Columns: + return Columns() @pytest.fixture diff --git a/tests/integration/test_integration_api.py b/tests/integration/test_integration_api.py index b8b994fe..1f43ad64 100644 --- a/tests/integration/test_integration_api.py +++ b/tests/integration/test_integration_api.py @@ -1,11 +1,12 @@ -from datetime import datetime +from datetime import datetime, timezone from uuid import uuid4 import pytest +import requests from pyairtable import Table -from pyairtable import formulas as fo -from pyairtable.utils import attachment +from pyairtable.formulas import AND, EQ, FIND, OR, RECORD_ID, Field, match +from pyairtable.utils import date_to_iso_str, datetime_to_iso_str pytestmark = [pytest.mark.integration] @@ -50,43 +51,50 @@ def test_integration_table(table, cols): assert len(records) == COUNT -def test_return_fields_by_field_id(table: 
Table, cols): +def test_use_field_ids(table: Table, cols): """ Test that we can get, create, and update records by field ID vs. name. See https://github.com/gtalarico/pyairtable/issues/194 + and https://github.com/gtalarico/pyairtable/issues/430 """ - # Create one record with return_fields_by_field_id=True - record = table.create({cols.TEXT_ID: "Hello"}, return_fields_by_field_id=True) + # Create one record with use_field_ids=True + record = table.create({cols.TEXT_ID: "Hello"}, use_field_ids=True) assert record["fields"][cols.TEXT_ID] == "Hello" - # Updating a record by field ID does not require any special parameters, - # but the return value will have field names (not IDs). - updated = table.update(record["id"], {cols.TEXT_ID: "Goodbye"}) - assert updated["fields"][cols.TEXT] == "Goodbye" + # Fetch one record with use_field_ids=True + fetched = table.get(record["id"], use_field_ids=True) + assert fetched["fields"][cols.TEXT_ID] == "Hello" - # This is not supported (422 Client Error: Unprocessable Entity for url) - # updated = table.update( - # record["id"], - # {cols.TEXT_ID: "Goodbye"}, - # return_fields_by_field_id=True, - # ) - # assert updated["fields"][cols.TEXT_ID] == "Goodbye" + # Update one record with use_field_ids=True + updated = table.update( + record["id"], + {cols.TEXT_ID: "Goodbye"}, + use_field_ids=True, + ) + assert updated["fields"][cols.TEXT_ID] == "Goodbye" - # Create multiple records with return_fields_by_field_id=True + # Create multiple records with use_field_ids=True records = table.batch_create( [ {cols.TEXT_ID: "Alpha"}, {cols.TEXT_ID: "Bravo"}, {cols.TEXT_ID: "Charlie"}, ], - return_fields_by_field_id=True, + use_field_ids=True, ) assert records[0]["fields"][cols.TEXT_ID] == "Alpha" assert records[1]["fields"][cols.TEXT_ID] == "Bravo" assert records[2]["fields"][cols.TEXT_ID] == "Charlie" - # Update multiple records with return_fields_by_field_id=True + # Fetch multiple records with use_field_ids=True + formula = 
OR(RECORD_ID().eq(record["id"]) for record in records) + fetched_many = {r["id"]: r for r in table.all(formula=formula, use_field_ids=True)} + assert fetched_many[records[0]["id"]]["fields"][cols.TEXT_ID] == "Alpha" + assert fetched_many[records[1]["id"]]["fields"][cols.TEXT_ID] == "Bravo" + assert fetched_many[records[2]["id"]]["fields"][cols.TEXT_ID] == "Charlie" + + # Update multiple records with use_field_ids=True updates = [ dict( record, @@ -94,7 +102,7 @@ def test_return_fields_by_field_id(table: Table, cols): ) for record in records ] - updated = table.batch_update(updates, return_fields_by_field_id=True) + updated = table.batch_update(updates, use_field_ids=True) assert updated[0]["fields"][cols.TEXT_ID] == "Hello, Alpha" assert updated[1]["fields"][cols.TEXT_ID] == "Hello, Bravo" assert updated[2]["fields"][cols.TEXT_ID] == "Hello, Charlie" @@ -117,7 +125,7 @@ def test_get_records_options(table: Table, cols): assert table.all(sort=[cols.TEXT, cols.NUM]) == [rec] assert table.all(time_zone="utc") == [rec] assert table.all(user_locale="en-ie") == [rec] - assert table.all(return_fields_by_field_id=True) == [ + assert table.all(use_field_ids=True) == [ { "id": rec["id"], "createdTime": rec["createdTime"], @@ -137,7 +145,7 @@ def test_get_records_options(table: Table, cols): assert table.all(formula=formula, sort=[cols.TEXT, cols.NUM]) == [rec] assert table.all(formula=formula, time_zone="utc") == [rec] assert table.all(formula=formula, user_locale="en-ie") == [rec] - assert table.all(formula=formula, return_fields_by_field_id=True) == [ + assert table.all(formula=formula, use_field_ids=True) == [ { "id": rec["id"], "createdTime": rec["createdTime"], @@ -153,17 +161,17 @@ def test_integration_field_equals(table: Table, cols): rv_create = table.create(values) # match all - finds - rv_first = table.first(formula=fo.match(values)) + rv_first = table.first(formula=match(values)) assert rv_first and rv_first["id"] == rv_create["id"] # match all - does not find 
values = {cols.TEXT: TEXT_VALUE, cols.NUM: 0} - rv_first = table.first(formula=fo.match(values)) + rv_first = table.first(formula=match(values)) assert rv_first is None # match all w/ match_any=True - does not find values = {cols.TEXT: TEXT_VALUE, cols.NUM: 0} - rv_first = table.first(formula=fo.match(values, match_any=True)) + rv_first = table.first(formula=match(values, match_any=True)) assert rv_first and rv_first["id"] == rv_create["id"] @@ -212,11 +220,11 @@ def test_batch_upsert(table: Table, cols): assert result["records"][2]["fields"] == {cols.TEXT: "Three", cols.NUM: 6} assert result["records"][3]["fields"] == {cols.NUM: 7} - # Test that batch_upsert passes along return_fields_by_field_id + # Test that batch_upsert passes along use_field_ids result = table.batch_upsert( [{"fields": {cols.TEXT: "Two", cols.NUM: 8}}], key_fields=[cols.TEXT], - return_fields_by_field_id=True, + use_field_ids=True, ) assert result["records"] == [ { @@ -228,25 +236,27 @@ def test_batch_upsert(table: Table, cols): def test_integration_formula_datetime(table: Table, cols): - VALUE = datetime.utcnow() - str_value = fo.to_airtable_value(VALUE) - rv_create = table.create({cols.DATETIME: str_value}) - rv_first = table.first(formula=fo.match({cols.DATETIME: str_value})) + now = datetime.now(timezone.utc) + formula = match({cols.DATETIME: now}) + rv_create = table.create({cols.DATETIME: datetime_to_iso_str(now)}) + rv_first = table.first(formula=formula) assert rv_first and rv_first["id"] == rv_create["id"] def test_integration_formula_date_filter(table: Table, cols): - dt = datetime.utcnow() + dt = datetime.now(timezone.utc) + dt_str = datetime_to_iso_str(dt) date = dt.date() - date_str = fo.to_airtable_value(date) + date_str = date_to_iso_str(date) created = [] for _ in range(2): - rec = table.create({cols.DATETIME: fo.to_airtable_value(dt)}) + rec = table.create({cols.DATETIME: dt_str}) created.append(rec) - formula = fo.FIND(fo.STR_VALUE(date_str), fo.FIELD(cols.DATETIME)) + 
formula = FIND(date_str, Field(cols.DATETIME)) rv_all = table.all(formula=formula) + print("repr", repr(formula), "\nstr", str(formula)) assert rv_all assert set([r["id"] for r in rv_all]) == set([r["id"] for r in created]) @@ -254,12 +264,12 @@ def test_integration_formula_date_filter(table: Table, cols): def test_integration_field_equals_with_quotes(table: Table, cols): VALUE = "Contact's Name {}".format(uuid4()) rv_create = table.create({cols.TEXT: VALUE}) - rv_first = table.first(formula=fo.match({cols.TEXT: VALUE})) + rv_first = table.first(formula=match({cols.TEXT: VALUE})) assert rv_first and rv_first["id"] == rv_create["id"] VALUE = 'Some "Quote" {}'.format(uuid4()) rv_create = table.create({cols.TEXT: VALUE}) - rv_first = table.first(formula=fo.match({cols.TEXT: VALUE})) + rv_first = table.first(formula=match({cols.TEXT: VALUE})) assert rv_first and rv_first["id"] == rv_create["id"] @@ -269,12 +279,10 @@ def test_integration_formula_composition(table: Table, cols): bool_ = True rv_create = table.create({cols.TEXT: text, cols.NUM: num, cols.BOOL: bool_}) - formula = fo.AND( - fo.EQUAL(fo.FIELD(cols.TEXT), fo.to_airtable_value(text)), - fo.EQUAL(fo.FIELD(cols.NUM), fo.to_airtable_value(num)), - fo.EQUAL( - fo.FIELD(cols.BOOL), fo.to_airtable_value(bool_) - ), # not needs to be int() + formula = AND( + EQ(Field(cols.TEXT), text), + EQ(Field(cols.NUM), num), + EQ(Field(cols.BOOL), bool_), # not needs to be int() ) rv_first = table.first(formula=formula) @@ -282,23 +290,43 @@ def test_integration_formula_composition(table: Table, cols): def test_integration_attachment(table, cols, valid_img_url): - rec = table.create({cols.ATTACHMENT: [attachment(valid_img_url)]}) + rec = table.create({cols.ATTACHMENT: [{"url": valid_img_url}]}) rv_get = table.get(rec["id"]) - assert rv_get["fields"]["attachment"][0]["filename"] == "logo.png" + att = rv_get["fields"]["attachment"][0] + assert att["filename"] in ( + valid_img_url.rpartition("/")[-1], # sometimes taken from URL + 
"a." + valid_img_url.rpartition(".")[-1], # default if not + ) + original = requests.get(valid_img_url).content + attached = requests.get(att["url"]).content + assert original == attached def test_integration_attachment_multiple(table, cols, valid_img_url): rec = table.create( { cols.ATTACHMENT: [ - attachment(valid_img_url, filename="a.jpg"), - attachment(valid_img_url, filename="b.jpg"), + {"url": valid_img_url, "filename": "a.png"}, + {"url": valid_img_url, "filename": "b.png"}, ] } ) rv_get = table.get(rec["id"]) - assert rv_get["fields"]["attachment"][0]["filename"] == "a.jpg" - assert rv_get["fields"]["attachment"][1]["filename"] == "b.jpg" + assert rv_get["fields"]["attachment"][0]["filename"] == "a.png" + assert rv_get["fields"]["attachment"][1]["filename"] == "b.png" + + +def test_integration_upload_attachment(table, cols, valid_img_url, tmp_path): + rec = table.create({cols.ATTACHMENT: [{"url": valid_img_url, "filename": "a.png"}]}) + content = requests.get(valid_img_url).content + response = table.upload_attachment(rec["id"], cols.ATTACHMENT, "b.png", content) + attached = response["fields"][cols.ATTACHMENT_ID] + assert attached[0]["filename"] == "a.png" + assert attached[0]["type"] == "image/png" + assert attached[0]["size"] == 7297 + assert attached[1]["filename"] == "b.png" + assert attached[1]["type"] == "image/png" + assert attached[1]["size"] == 7297 def test_integration_comments(api, table: Table, cols): @@ -318,7 +346,46 @@ def test_integration_comments(api, table: Table, cols): comments[0].text = "Never mind!" comments[0].save() assert whoami not in comments[0].text - assert comments[0].mentioned is None + assert not comments[0].mentioned # Test that we can delete the comment comments[0].delete() + + +def test_pagination(cols, api, table): + """ + Test that we can paginate through records as expected. 
+ """ + # Start by creating 500 unique records + created = table.batch_create([{cols.TEXT: f"Record {i}"} for i in range(500)]) + formula = OR(RECORD_ID().eq(record["id"]) for record in created[:-1]) + + # The formula ought to be longer than the maximum URL length, + # so we know we'll convert the request to a POST. + assert len(str(formula)) > api.MAX_URL_LENGTH + assert created[-1]["id"] not in str(formula) + + for page_size in [10, 50]: + paginator = table.iterate(formula=formula, page_size=page_size) + + # Test that each page is the expected size + assert len(page1 := next(paginator)) == page_size + assert len(page2 := next(paginator)) == page_size + + # Test that we don't keep getting the same records + page1_ids = {record["id"] for record in page1} + page2_ids = {record["id"] for record in page2} + assert page1_ids != page2_ids + + for max_records in [10, 50]: + # Test that max_records actually limits the number of records returned, + # not just the size of each page of records. + records = table.all(formula=formula, max_records=max_records) + assert len(records) == max_records + + # Test the combination of each. 
+ paginator = table.iterate(formula=formula, page_size=10, max_records=25) + pages = list(paginator) + ids = {record["id"] for page in pages for record in page} + assert [len(page) for page in pages] == [10, 10, 5] + assert len(ids) == 25 diff --git a/tests/integration/test_integration_enterprise.py b/tests/integration/test_integration_enterprise.py new file mode 100644 index 00000000..f51a8ead --- /dev/null +++ b/tests/integration/test_integration_enterprise.py @@ -0,0 +1,148 @@ +import os +import uuid + +import pytest +from requests import HTTPError + +import pyairtable + +pytestmark = [pytest.mark.integration] + + +@pytest.fixture +def enterprise(api): + try: + return api.enterprise(os.environ["AIRTABLE_ENTERPRISE_ID"]) + except KeyError: + pytest.skip("test requires AIRTABLE_ENTERPRISE_ID") + + +@pytest.fixture +def workspace_id(): + return "wsp0HnyXmNnKzc5ng" + + +@pytest.fixture +def workspace(api: pyairtable.Api, workspace_id): + return api.workspace(workspace_id) + + +@pytest.fixture(autouse=True) +def confirm_enterprise_plan(workspace: pyairtable.Workspace): + try: + workspace.collaborators() + except HTTPError: + pytest.skip("This test requires creator access to an enterprise workspace") + + +@pytest.fixture +def blank_base(workspace: pyairtable.Workspace): + base = workspace.create_base( + f"Test {uuid.uuid1().hex}", + [{"name": "One", "fields": [{"type": "singleLineText", "name": "Label"}]}], + ) + try: + yield base + finally: + base.delete() + + +def test_user(enterprise: pyairtable.Enterprise): + """ + Test that we can retrieve information about the current logged-in user. + """ + user_id = enterprise.api.whoami()["id"] + assert user_id == enterprise.user(user_id).id + + +def test_user__invalid(enterprise): + with pytest.raises(HTTPError): + enterprise.user("invalidUserId") + + +def test_users(enterprise: pyairtable.Enterprise): + """ + Test that we can retrieve information about an enterprise + and retrieve user information by ID or by email. 
+ """ + user_ids = enterprise.info().user_ids[:5] + users_from_ids = enterprise.users(user_ids) + assert {u.id for u in users_from_ids} == set(user_ids) + users_from_emails = enterprise.users(u.email for u in users_from_ids) + assert {u.id for u in users_from_emails} == set(user_ids) + + +def test_create_table(blank_base: pyairtable.Base): + """ + Test that we can create a new table on an existing base. + """ + table = blank_base.create_table("Two", [{"type": "singleLineText", "name": "Name"}]) + assert table.schema().field("Name").type == "singleLineText" + + +def test_update_table(blank_base: pyairtable.Base): + """ + Test that we can modify a table's name and description. + """ + new_name = f"Renamed {uuid.uuid1().hex[-6:]}" + schema = blank_base.schema().tables[0] + schema.name = new_name + schema.save() + assert blank_base.schema(force=True).tables[0].name == new_name + schema.description = "Renamed" + schema.save() + assert blank_base.schema(force=True).tables[0].description == "Renamed" + + +def test_create_field(blank_base: pyairtable.Base): + """ + Test that we can create a new field on an existing table. + """ + table = blank_base.tables()[0] + assert len(table.schema().fields) == 1 + fld = table.create_field( + "Status", + field_type="singleSelect", + options={ + "choices": [ + {"name": "Todo"}, + {"name": "In Progress"}, + {"name": "Done"}, + ] + }, + ) + # Ensure we don't need to reload the schema to see this new field + assert table._schema.field(fld.id).name == "Status" + + +def test_update_field(blank_base: pyairtable.Base): + """ + Test that we can modify a field's name and description. 
+ """ + + def reload_field(): + return blank_base.schema(force=True).tables[0].fields[0] + + field = reload_field() + + new_name = f"Renamed {uuid.uuid1().hex[-6:]}" + field.name = new_name + field.save() + assert reload_field().name == new_name + + field.description = "Renamed" + field.save() + assert reload_field().description == "Renamed" + + +def test_audit_log(api): + """ + Test that we can call the audit log endpoint. + """ + if "AIRTABLE_ENTERPRISE_ID" not in os.environ: + return pytest.skip("test_audit_log requires AIRTABLE_ENTERPRISE_ID") + + enterprise = api.enterprise(os.environ["AIRTABLE_ENTERPRISE_ID"]) + for page in enterprise.audit_log(page_limit=1): + for event in page.events: + assert isinstance(event.action, str) diff --git a/tests/integration/test_integration_metadata.py b/tests/integration/test_integration_metadata.py index d1f25d60..315a6a91 100644 --- a/tests/integration/test_integration_metadata.py +++ b/tests/integration/test_integration_metadata.py @@ -1,31 +1,38 @@ import pytest +import requests -from pyairtable import Base, Table -from pyairtable.metadata import get_api_bases, get_base_schema, get_table_schema +from pyairtable import Api, Base pytestmark = [pytest.mark.integration] -def test_get_api_bases(base: Base, base_name: str): - rv = get_api_bases(base) - assert base_name in [b["name"] for b in rv["bases"]] +def test_api_bases(api: Api, base_id: str, base_name: str, table_name: str): + bases = {base.id: base for base in api.bases()} + assert bases[base_id].name == base_name + assert bases[base_id].table(table_name).name == table_name -def test_get_base_schema(base: Base): - rv = get_base_schema(base) - assert sorted(table["name"] for table in rv["tables"]) == [ - "Address", - "Contact", - "EVERYTHING", - "TEST_TABLE", - ] +def test_api_base(api: Api, base_id: str, base_name: str): + base = api.base(base_id, validate=True) + assert base.name == base_name -def test_get_table_schema(table: Table): - rv = get_table_schema(table) - 
assert rv and rv["name"] == table.name +def test_base_info(base: Base): + with pytest.raises( + requests.HTTPError, + match=r"Base.collaborators\(\) requires an enterprise billing plan", + ): + base.collaborators() -def test_get_table_schema__invalid_table(table, monkeypatch): - monkeypatch.setattr(table, "name", "DoesNotExist") - assert get_table_schema(table) is None +def test_base_schema(base: Base, table_name: str): + schema = base.schema() + assert table_name in [t.name for t in schema.tables] + assert schema.table(table_name).name == table_name + + +def test_table_schema(base: Base, table_name: str, cols): + schema = base.table(table_name).schema() + assert cols.TEXT in [f.name for f in schema.fields] + assert schema.field(cols.TEXT).id == cols.TEXT_ID + assert schema.field(cols.TEXT_ID).name == cols.TEXT diff --git a/tests/integration/test_integration_orm.py b/tests/integration/test_integration_orm.py index 1d064ee7..605f217e 100644 --- a/tests/integration/test_integration_orm.py +++ b/tests/integration/test_integration_orm.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone import pytest @@ -67,7 +67,7 @@ class _Everything(Model): formula_nan = f.TextField("Formula NaN", readonly=True) addresses = f.LinkField("Address", _Address) link_count = f.CountField("Link to Self (Count)") - link_self = f.LinkField["_Everything"]( + link_self = f.SingleLinkField["_Everything"]( "Link to Self", model="test_integration_orm._Everything", lazy=False, @@ -80,14 +80,31 @@ class _Everything(Model): created_by = f.CreatedByField("Created By") last_modified = f.LastModifiedTimeField("Last Modified") last_modified_by = f.LastModifiedByField("Last Modified By") + required_barcode = f.RequiredBarcodeField("Barcode") + required_collaborator = f.RequiredCollaboratorField("Assignee") + required_count = f.RequiredCountField("Count") + required_currency = f.RequiredCurrencyField("Dollars") + required_date = f.RequiredDateField("Date") + 
required_datetime = f.RequiredDatetimeField("DateTime") + required_duration = f.RequiredDurationField("Duration (h:mm)") + required_email = f.RequiredEmailField("Email") + required_float = f.RequiredFloatField("Decimal 1") + required_integer = f.RequiredIntegerField("Integer") + required_number = f.RequiredNumberField("Number") + required_percent = f.RequiredPercentField("Percent") + required_phone = f.RequiredPhoneNumberField("Phone") + required_rating = f.RequiredRatingField("Stars") + required_rich_text = f.RequiredRichTextField("Notes") + required_select = f.RequiredSelectField("Status") + required_text = f.RequiredTextField("Name") + required_url = f.RequiredUrlField("URL") def _model_fixture(cls, monkeypatch, make_meta): monkeypatch.setattr(cls, "Meta", make_meta(cls.__name__.replace("_", ""))) yield cls - table = cls.get_table() - for page in table.iterate(): - table.batch_delete([record["id"] for record in page]) + for page in cls.meta.table.iterate(): + cls.meta.table.batch_delete([record["id"] for record in page]) @pytest.fixture @@ -116,16 +133,16 @@ def test_integration_orm(Contact, Address): email="email@email.com", is_registered=True, address=[address], - birthday=datetime.utcnow().date(), - last_access=datetime.utcnow(), + birthday=datetime.now(timezone.utc).date(), + last_access=datetime.now(timezone.utc), ) assert contact.first_name == "John" - assert contact.save() + assert contact.save().created assert contact.id contact.first_name = "Not Gui" - assert not contact.save() + assert not contact.save().created rv_address = contact.address[0] assert rv_address.exists() @@ -153,7 +170,7 @@ class Contact(Model): first_name = f.TextField("First Name") last_name = f.TextField("Last Name") - table = Contact.get_table() + table = Contact.meta.table record = table.create( { "First Name": "Alice", @@ -180,7 +197,17 @@ def test_every_field(Everything): type(field) for field in vars(Everything).values() if isinstance(field, f.Field) } for field_class in 
f.ALL_FIELDS: - if field_class in {f.ExternalSyncSourceField}: + if field_class in { + f.ExternalSyncSourceField, + f.AITextField, + f.RequiredAITextField, + f.ManualSortField, + # These are so similar to TextField we don't need to integration test them + f.SingleLineTextField, + f.MultilineTextField, + f.RequiredSingleLineTextField, + f.RequiredMultilineTextField, + }: continue assert field_class in classes_used @@ -213,13 +240,13 @@ def test_every_field(Everything): record.save() assert record.id assert record.addresses == [] - assert record.link_self == [] - record.link_self = [record] + assert record.link_self is None + record.link_self = record record.save() # The ORM won't refresh the model's field values after save() assert record.formula_integer is None - assert record.formula_nan is None + assert record.formula_nan == "" assert record.link_count is None assert record.lookup_error == [] assert record.lookup_integer == [] @@ -229,3 +256,33 @@ def test_every_field(Everything): assert record.link_count == 1 assert record.lookup_error == [{"error": "#ERROR!"}] assert record.lookup_integer == [record.formula_integer] + + +def test_attachments_upload(Everything, valid_img_url, tmp_path): + record: _Everything = Everything() + record.save() + + # add an attachment via URL + record.attachments.append({"url": valid_img_url, "filename": "logo.png"}) + record.save() + assert record.attachments[0]["url"] == valid_img_url + record.fetch() + assert record.attachments[0]["id"] is not None + assert record.attachments[0]["filename"] == "logo.png" + + # add an attachment by uploading content + tmp_file = tmp_path / "sample.txt" + tmp_file.write_text("Hello, World!") + record.attachments.upload(tmp_file) + # ensure we got all attachments, not just the latest one + assert record.attachments[0]["filename"] == "logo.png" + assert record.attachments[0]["type"] == "image/png" + assert record.attachments[1]["filename"] == "sample.txt" + assert record.attachments[1]["type"] == 
"text/plain" + + # ensure everything persists/loads correctly after fetch() + record.fetch() + assert record.attachments[0]["filename"] == "logo.png" + assert record.attachments[0]["type"] == "image/png" + assert record.attachments[1]["filename"] == "sample.txt" + assert record.attachments[1]["type"] == "text/plain" diff --git a/tests/sample_data/AuditLogResponse.json b/tests/sample_data/AuditLogResponse.json new file mode 100644 index 00000000..4b9e3c5b --- /dev/null +++ b/tests/sample_data/AuditLogResponse.json @@ -0,0 +1,39 @@ +{ + "events": [ + { + "action": "createBase", + "actor": { + "type": "user", + "user": { + "email": "foo@bar.com", + "id": "usrL2PNC5o3H4lBEi", + "name": "Jane Doe" + } + }, + "context": { + "actionId": "actxr1mLqZz1T35FA", + "baseId": "appLkNDICXNqxSDhG", + "enterpriseAccountId": "entUBq2RGdihxl3vU", + "interfaceId": "pbdyGA3PsOziEHPDE", + "workspaceId": "wspmhESAta6clCCwF" + }, + "id": "01FYFFDE39BDDBC0HWK51R6GPF", + "modelId": "appLkNDICXNqxSDhG", + "modelType": "base", + "origin": { + "ipAddress": "1.2.3.4", + "sessionId": "sesE3ulSADiRNhqAv", + "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36" + }, + "payload": { + "name": "My newly created base!" 
+ }, + "payloadVersion": "1.0", + "timestamp": "2022-02-01T21:25:05.663Z" + } + ], + "pagination": { + "next": "MDFHUk5OMlM4MFhTNkY0R0M2QVlZTVZNNDQ=", + "previous": "MDFHUk5ITVhNMEE4UFozTlg1SlFaRlMyOFM=" + } +} diff --git a/tests/sample_data/BaseCollaborators.json b/tests/sample_data/BaseCollaborators.json new file mode 100644 index 00000000..411b4b56 --- /dev/null +++ b/tests/sample_data/BaseCollaborators.json @@ -0,0 +1,130 @@ +{ + "collaborators": { + "baseCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bam.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "create", + "userId": "usrsOEchC9xuwRgKk" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bar.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "owner", + "userId": "usrL2PNC5o3H4lBEi" + } + ] + }, + "createdTime": "2019-01-03T12:33:12.421Z", + "groupCollaborators": { + "baseCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "groupId": "ugpR8ZT9KtIgp8Bh3", + "name": "group 2", + "permissionLevel": "create" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "groupId": "ugp1mKGb3KXUyQfOZ", + "name": "group 1", + "permissionLevel": "edit" + } + ] + }, + "id": "appLkNDICXNqxSDhG", + "individualCollaborators": { + "baseCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bam.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "create", + "userId": "usrsOEchC9xuwRgKk" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bar.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "owner", + "userId": "usrL2PNC5o3H4lBEi" + } + ] + }, + "interfaces": { + "pbdLkNDICXNqxSDhG": { + "createdTime": "2024-02-04T02:28:06.000Z", + "firstPublishTime": 
"2024-02-04T02:28:12.000Z", + "groupCollaborators": [ + { + "createdTime": "2024-02-04T02:28:20.184Z", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "groupId": "ugpR8ZT9KtIgp8Bh3", + "name": "Test Interface", + "permissionLevel": "read" + } + ], + "id": "pbdLkNDICXNqxSDhG", + "individualCollaborators": [ + { + "createdTime": "2024-02-04T04:00:00.749Z", + "email": "test@example.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "edit", + "userId": "usrR8ZT9KtIgp8Bh3" + } + ], + "inviteLinks": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "invJiqaXmPqq6ABCD", + "invitedEmail": "bam@bam.com", + "permissionLevel": "edit", + "referredByUserId": "usrL2PNC5o3H4lBEi", + "restrictedToEmailDomains": [], + "type": "singleUse" + } + ], + "name": "Interface" + } + }, + "inviteLinks": { + "baseInviteLinks": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "invJiqaXmPqq6Ec87", + "invitedEmail": null, + "permissionLevel": "read", + "referredByUserId": "usrsOEchC9xuwRgKk", + "restrictedToEmailDomains": [ + "bam.com" + ], + "type": "multiUse" + } + ], + "workspaceInviteLinks": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "invJiqaXmPqq6Ec99", + "invitedEmail": "bam@bam.com", + "permissionLevel": "edit", + "referredByUserId": "usrL2PNC5o3H4lBEi", + "restrictedToEmailDomains": [], + "type": "singleUse" + } + ] + }, + "name": "my first base", + "permissionLevel": "none", + "workspaceId": "wspmhESAta6clCCwF" +} diff --git a/tests/sample_data/BaseSchema.json b/tests/sample_data/BaseSchema.json new file mode 100644 index 00000000..5577acee --- /dev/null +++ b/tests/sample_data/BaseSchema.json @@ -0,0 +1,75 @@ +{ + "tables": [ + { + "description": "Apartments to track.", + "fields": [ + { + "description": "Name of the apartment", + "id": "fld1VnoyuotSTyxW1", + "name": "Name", + "type": "singleLineText" + }, + { + "id": "fldoaIqdn5szURHpw", + "name": "Pictures", + "options": { + "isReversed": false + }, + "type": "multipleAttachments" + }, 
+ { + "id": "fldumZe00w09RYTW6", + "name": "District", + "options": { + "inverseLinkFieldId": "fldWnCJlo2z6ttT8Y", + "isReversed": false, + "linkedTableId": "tblK6MZHez0ZvBChZ", + "prefersSingleRecordLink": true + }, + "type": "multipleRecordLinks" + } + ], + "id": "tbltp8DGLhqbUmjK1", + "name": "Apartments", + "primaryFieldId": "fld1VnoyuotSTyxW1", + "views": [ + { + "id": "viwQpsuEDqHFqegkp", + "name": "Grid view", + "type": "grid" + } + ] + }, + { + "fields": [ + { + "id": "fldEVzvQOoULO38yl", + "name": "Name", + "type": "singleLineText" + }, + { + "description": "Apartments that belong to this district", + "id": "fldWnCJlo2z6ttT8Y", + "name": "Apartments", + "options": { + "inverseLinkFieldId": "fldumZe00w09RYTW6", + "isReversed": false, + "linkedTableId": "tbltp8DGLhqbUmjK1", + "prefersSingleRecordLink": false + }, + "type": "multipleRecordLinks" + } + ], + "id": "tblK6MZHez0ZvBChZ", + "name": "Districts", + "primaryFieldId": "fldEVzvQOoULO38yl", + "views": [ + { + "id": "viwi3KXvrKug2mIBS", + "name": "Grid view", + "type": "grid" + } + ] + } + ] +} diff --git a/tests/sample_data/BaseShares.json b/tests/sample_data/BaseShares.json new file mode 100644 index 00000000..a52ee8af --- /dev/null +++ b/tests/sample_data/BaseShares.json @@ -0,0 +1,44 @@ +{ + "shares": [ + { + "createdByUserId": "usrL2PNC5o3H4lBEi", + "createdTime": "2019-01-01T00:00:00.000Z", + "effectiveEmailDomainAllowList": [ + "foobar.com" + ], + "isPasswordProtected": true, + "restrictedToEnterpriseMembers": false, + "restrictedToEmailDomains": [ + "foobar.com" + ], + "shareId": "shr9SpczJvQpfAzSp", + "shareTokenPrefix": "shr9Spcz", + "state": "enabled", + "type": "base" + }, + { + "createdByUserId": "usrL2PNC5o3H4lBEi", + "createdTime": "2019-01-01T00:00:00.000Z", + "isPasswordProtected": false, + "restrictedToEnterpriseMembers": false, + "restrictedToEmailDomains": [], + "shareId": "shrMg5vs9SpczJvQp", + "shareTokenPrefix": "shrMg5vs", + "state": "disabled", + "type": "view", + "viewId": 
"viwQpsuEDqHFqegkp" + }, + { + "blockInstallationId": "bliXyN0Q6zfajnDOG", + "createdByUserId": "usrL2PNC5o3H4lBEi", + "createdTime": "2019-01-01T00:00:00.000Z", + "isPasswordProtected": false, + "restrictedToEnterpriseMembers": false, + "restrictedToEmailDomains": [], + "shareId": "shrjjKdhMg5vs9Spc", + "shareTokenPrefix": "shrjjKdh", + "state": "disabled", + "type": "blockInstallation" + } + ] +} diff --git a/tests/sample_data/Bases.json b/tests/sample_data/Bases.json new file mode 100644 index 00000000..ad865743 --- /dev/null +++ b/tests/sample_data/Bases.json @@ -0,0 +1,14 @@ +{ + "bases": [ + { + "id": "appLkNDICXNqxSDhG", + "name": "Apartment Hunting", + "permissionLevel": "create" + }, + { + "id": "appSW9R5uCNmRmfl6", + "name": "Project Tracker", + "permissionLevel": "edit" + } + ] +} diff --git a/tests/sample_data/Comment.json b/tests/sample_data/Comment.json new file mode 100644 index 00000000..21336ae0 --- /dev/null +++ b/tests/sample_data/Comment.json @@ -0,0 +1,28 @@ +{ + "author": { + "id": "usrLkNDICXNqxSDhG", + "email": "author@example.com" + }, + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "comLkNDICXNqxSDhG", + "lastUpdatedTime": "2019-01-03T12:33:12.421Z", + "text": "Hello, @[usr00000mentioned]!", + "mentioned": { + "usr00000mentioned": { + "displayName": "Alice Doe", + "id": "usr00000mentioned", + "email": "alice@example.com", + "type": "user" + } + }, + "parentCommentId": "comkNDICXNqxSDhGL", + "reactions": [ + { + "emoji": {"unicodeCharacter": "1f44d"}, + "reactingUser": { + "userId": "usr0000000reacted", + "email": "carol@example.com" + } + } + ] +} diff --git a/tests/sample_data/EnterpriseInfo.json b/tests/sample_data/EnterpriseInfo.json new file mode 100644 index 00000000..bd848286 --- /dev/null +++ b/tests/sample_data/EnterpriseInfo.json @@ -0,0 +1,27 @@ +{ + "createdTime": "2019-01-03T12:33:12.421Z", + "descendantEnterpriseAccountIds": [ + "entJ9ZQ5vz9ZQ5vz9" + ], + "emailDomains": [ + { + "emailDomain": "foobar.com", + 
"isSsoRequired": true + } + ], + "groupIds": [ + "ugp1mKGb3KXUyQfOZ", + "ugpR8ZT9KtIgp8Bh3" + ], + "id": "entUBq2RGdihxl3vU", + "rootEnterpriseAccountId": "entUBq2RGdihxl3vU", + "userIds": [ + "usrL2PNC5o3H4lBEi", + "usrsOEchC9xuwRgKk", + "usrGcrteE5fUMqq0R" + ], + "workspaceIds": [ + "wspmhESAta6clCCwF", + "wspHvvm4dAktsStZH" + ] +} diff --git a/tests/sample_data/TableSchema.json b/tests/sample_data/TableSchema.json new file mode 100644 index 00000000..c91024e5 --- /dev/null +++ b/tests/sample_data/TableSchema.json @@ -0,0 +1,40 @@ +{ + "description": "Apartments to track.", + "fields": [ + { + "description": "Name of the apartment", + "id": "fld1VnoyuotSTyxW1", + "name": "Name", + "type": "singleLineText" + }, + { + "id": "fldoaIqdn5szURHpw", + "name": "Pictures", + "options": { + "isReversed": false + }, + "type": "multipleAttachments" + }, + { + "id": "fldumZe00w09RYTW6", + "name": "District", + "options": { + "inverseLinkFieldId": "fldWnCJlo2z6ttT8Y", + "isReversed": false, + "linkedTableId": "tblK6MZHez0ZvBChZ", + "prefersSingleRecordLink": true + }, + "type": "multipleRecordLinks" + } + ], + "id": "tbltp8DGLhqbUmjK1", + "name": "Apartments", + "primaryFieldId": "fld1VnoyuotSTyxW1", + "views": [ + { + "id": "viwQpsuEDqHFqegkp", + "name": "Grid view", + "type": "grid" + } + ] +} diff --git a/tests/sample_data/UserGroup.json b/tests/sample_data/UserGroup.json new file mode 100644 index 00000000..ef22b489 --- /dev/null +++ b/tests/sample_data/UserGroup.json @@ -0,0 +1,61 @@ +{ + "collaborations": { + "baseCollaborations": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2021-06-02T07:37:50.000Z", + "grantedByUserId": "usrogvSbotRtzdtZW", + "permissionLevel": "create" + } + ], + "interfaceCollaborations": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrqccqnMB2eHylqB", + "interfaceId": "pbdyGA3PsOziEHPDE", + "permissionLevel": "edit" + } + ], + "workspaceCollaborations": [ + { + "createdTime": 
"2021-06-02T07:37:48.000Z", + "grantedByUserId": "usrqccqnMB2eHylqB", + "permissionLevel": "edit", + "workspaceId": "wspmhESAta6clCCwF" + } + ] + }, + "createdTime": "2021-06-02T07:37:19.000Z", + "enterpriseAccountId": "entUBq2RGdihxl3vU", + "id": "ugp1mKGb3KXUyQfOZ", + "mappedUserLicenseType": "contributor", + "members": [ + { + "createdTime": "2021-06-02T07:37:19.000Z", + "email": "foo@bar.com", + "firstName": "Jane", + "lastName": "Doe", + "role": "member", + "userId": "usrL2PNC5o3H4lBEi" + }, + { + "createdTime": "2021-06-02T07:37:19.000Z", + "email": "foo@bam.com", + "firstName": "Alex", + "lastName": "Hay", + "role": "manager", + "userId": "usrsOEchC9xuwRgKk" + }, + { + "createdTime": "2021-06-02T07:37:19.000Z", + "email": "bam@bam.com", + "firstName": "John", + "lastName": "Dane", + "role": "member", + "userId": "usrGcrteE5fUMqq0R" + } + ], + "name": "Group name", + "updatedTime": "2022-09-02T10:10:35.000Z" +} diff --git a/tests/sample_data/UserInfo.json b/tests/sample_data/UserInfo.json new file mode 100644 index 00000000..71bf12f4 --- /dev/null +++ b/tests/sample_data/UserInfo.json @@ -0,0 +1,49 @@ +{ + "collaborations": { + "baseCollaborations": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrqccqnMB2eHylqB", + "permissionLevel": "edit" + } + ], + "interfaceCollaborations": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrogvSbotRtzdtZW", + "interfaceId": "pbdyGA3PsOziEHPDE", + "permissionLevel": "edit" + } + ], + "workspaceCollaborations": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrGcrteE5fUMqq0R", + "permissionLevel": "owner", + "workspaceId": "wspmhESAta6clCCwF" + } + ] + }, + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bar.com", + "groups": [ + { + "id": "ugp1mKGb3KXUyQfOZ" + }, + { + "id": "ugpR8ZT9KtIgp8Bh3" + } + ], + "id": "usrL2PNC5o3H4lBEi", + "invitedToAirtableByUserId": 
"usrsOEchC9xuwRgKk", + "isAdmin": true, + "isManaged": true, + "isServiceAccount": false, + "isSsoRequired": true, + "isTwoFactorAuthEnabled": false, + "lastActivityTime": "2019-01-03T12:33:12.421Z", + "name": "foo baz", + "state": "provisioned" +} diff --git a/tests/sample_data/UserRemoved.json b/tests/sample_data/UserRemoved.json new file mode 100644 index 00000000..006bea41 --- /dev/null +++ b/tests/sample_data/UserRemoved.json @@ -0,0 +1,40 @@ +{ + "shared": { + "workspaces": [ + { + "permissionLevel": "owner", + "userId": "usrL2PNC5o3H4lBEi", + "workspaceId": "wsp00000000000000", + "workspaceName": "Workspace name" + } + ] + }, + "unshared": { + "bases": [ + { + "baseId": "app00000000000000", + "baseName": "Base name", + "formerPermissionLevel": "create", + "userId": "usr00000000000000" + } + ], + "interfaces": [ + { + "baseId": "app00000000000000", + "formerPermissionLevel": "create", + "interfaceId": "pgb00000000000000", + "interfaceName": "Interface name", + "userId": "usr00000000000000" + } + ], + "workspaces": [ + { + "formerPermissionLevel": "owner", + "userId": "usr00000000000000", + "workspaceId": "wsp00000000000000", + "workspaceName": "Workspace name" + } + ] + }, + "wasUserRemovedAsAdmin": true +} diff --git a/tests/sample_data/ViewSchema.json b/tests/sample_data/ViewSchema.json new file mode 100644 index 00000000..e5530fd0 --- /dev/null +++ b/tests/sample_data/ViewSchema.json @@ -0,0 +1,12 @@ + +{ + "id": "viwQpsuEDqHFqegkp", + "name": "My Grid View", + "personalForUserId": "usrL2PNC5o3H4lBEi", + "type": "grid", + "visibleFieldIds": [ + "fldL2PNC5o3H4lBE1", + "fldL2PNC5o3H4lBE2", + "fldL2PNC5o3H4lBE3" + ] +} diff --git a/tests/sample_data/Webhook.json b/tests/sample_data/Webhook.json index 66028168..839917e2 100644 --- a/tests/sample_data/Webhook.json +++ b/tests/sample_data/Webhook.json @@ -14,13 +14,30 @@ "success": false, "willBeRetried": true }, - "lastSuccessfulNotificationTime": null, + "lastSuccessfulNotificationTime": 
"2022-02-01T21:25:05.663Z", "notificationUrl": "https://example.com/receive-ping", "specification": { "options": { "filters": { "dataTypes": ["tableData"], - "recordChangeScope": "tbltp8DGLhqbUmjK1" + "changeTypes": ["add", "remove", "update"], + "fromSources": ["client"], + "recordChangeScope": "tbltp8DGLhqbUmjK1", + "sourceOptions": { + "formPageSubmission": { + "pageId": "pbdLkNDICXNqxSDhG" + }, + "formSubmission": { + "viewId": "viwLkNDICXNqxSDhG" + } + }, + "watchDataInFieldIds": ["fldLkNDICXNqxSDhG"], + "watchSchemasOfFieldIds": ["fldLkNDICXNqxSDhG"] + }, + "includes": { + "includeCellValuesInFieldIds": "all", + "includePreviousCellValues": false, + "includePreviousFieldDefinitions": false } } } diff --git a/tests/sample_data/WorkspaceCollaborators.json b/tests/sample_data/WorkspaceCollaborators.json new file mode 100644 index 00000000..3d208014 --- /dev/null +++ b/tests/sample_data/WorkspaceCollaborators.json @@ -0,0 +1,103 @@ +{ + "baseIds": [ + "appLkNDICXNqxSDhG", + "appSW9R5uCNmRmfl6" + ], + "collaborators": { + "baseCollaborators": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bam.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "create", + "userId": "usrsOEchC9xuwRgKk" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bar.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "owner", + "userId": "usrL2PNC5o3H4lBEi" + } + ] + }, + "createdTime": "2019-01-03T12:33:12.421Z", + "groupCollaborators": { + "baseCollaborators": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "groupId": "ugpR8ZT9KtIgp8Bh3", + "name": "group 2", + "permissionLevel": "create" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "groupId": "ugp1mKGb3KXUyQfOZ", + "name": "group 1", + 
"permissionLevel": "edit" + } + ] + }, + "id": "wspmhESAta6clCCwF", + "individualCollaborators": { + "baseCollaborators": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bam.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "create", + "userId": "usrsOEchC9xuwRgKk" + } + ], + "workspaceCollaborators": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "email": "foo@bar.com", + "grantedByUserId": "usrL2PNC5o3H4lBEi", + "permissionLevel": "owner", + "userId": "usrL2PNC5o3H4lBEi" + } + ] + }, + "inviteLinks": { + "baseInviteLinks": [ + { + "baseId": "appLkNDICXNqxSDhG", + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "invJiqaXmPqqAPP99", + "invitedEmail": null, + "permissionLevel": "read", + "referredByUserId": "usrsOEchC9xuwRgKk", + "restrictedToEmailDomains": [], + "type": "multiUse" + } + ], + "workspaceInviteLinks": [ + { + "createdTime": "2019-01-03T12:33:12.421Z", + "id": "invJiqaXmPqqWSP00", + "invitedEmail": "bam@bam.com", + "permissionLevel": "owner", + "referredByUserId": "usrL2PNC5o3H4lBEi", + "restrictedToEmailDomains": [ + "foobar.com" + ], + "type": "singleUse" + } + ] + }, + "name": "my first workspace", + "workspaceRestrictions": { + "inviteCreationRestriction": "onlyOwners", + "shareCreationRestriction": "unrestricted" + } +} diff --git a/tests/sample_data/field_schema/AITextFieldSchema.json b/tests/sample_data/field_schema/AITextFieldSchema.json new file mode 100644 index 00000000..76285c06 --- /dev/null +++ b/tests/sample_data/field_schema/AITextFieldSchema.json @@ -0,0 +1,13 @@ +{ + "type": "aiText", + "id": "fld8cfZQtRNaiText", + "name": "AI Text", + "options": { + "prompt": [ + { + "field": {"fieldId": "fldEHLmq3SvZCvgOT"} + } + ], + "referencedFieldIds": ["fldEHLmq3SvZCvgOT"] + } +} diff --git a/tests/sample_data/field_schema/AutoNumberFieldSchema.json b/tests/sample_data/field_schema/AutoNumberFieldSchema.json new file mode 100644 index 00000000..7bd1fea3 --- 
/dev/null +++ b/tests/sample_data/field_schema/AutoNumberFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "autoNumber", + "id": "fldIbDLEAaZTbedTy", + "name": "Autonumber" +} diff --git a/tests/sample_data/field_schema/BarcodeFieldSchema.json b/tests/sample_data/field_schema/BarcodeFieldSchema.json new file mode 100644 index 00000000..aa16f858 --- /dev/null +++ b/tests/sample_data/field_schema/BarcodeFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "barcode", + "id": "flduAhdFgj0sUOiJ6", + "name": "Barcode" +} diff --git a/tests/sample_data/field_schema/ButtonFieldSchema.json b/tests/sample_data/field_schema/ButtonFieldSchema.json new file mode 100644 index 00000000..d4800ab1 --- /dev/null +++ b/tests/sample_data/field_schema/ButtonFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "button", + "id": "fldPamhjpzu0W8VQU", + "name": "Open URL" +} diff --git a/tests/sample_data/field_schema/CheckboxFieldSchema.json b/tests/sample_data/field_schema/CheckboxFieldSchema.json new file mode 100644 index 00000000..ed50f53b --- /dev/null +++ b/tests/sample_data/field_schema/CheckboxFieldSchema.json @@ -0,0 +1,9 @@ +{ + "type": "checkbox", + "options": { + "icon": "check", + "color": "greenBright" + }, + "id": "fldxX0QdH2ROzqDS9", + "name": "Done" +} diff --git a/tests/sample_data/field_schema/CountFieldSchema.json b/tests/sample_data/field_schema/CountFieldSchema.json new file mode 100644 index 00000000..c146153b --- /dev/null +++ b/tests/sample_data/field_schema/CountFieldSchema.json @@ -0,0 +1,9 @@ +{ + "type": "count", + "options": { + "isValid": true, + "recordLinkFieldId": "fldNvFMYxBnf35WkO" + }, + "id": "fldVCMT5NmDVX2IyW", + "name": "Link to Self (Count)" +} diff --git a/tests/sample_data/field_schema/CreatedByFieldSchema.json b/tests/sample_data/field_schema/CreatedByFieldSchema.json new file mode 100644 index 00000000..aaaec311 --- /dev/null +++ b/tests/sample_data/field_schema/CreatedByFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "createdBy", + "id": "fldq5hwHSdDpt77oh", + 
"name": "Created By" +} diff --git a/tests/sample_data/field_schema/CreatedTimeFieldSchema.json b/tests/sample_data/field_schema/CreatedTimeFieldSchema.json new file mode 100644 index 00000000..2f1c4f1e --- /dev/null +++ b/tests/sample_data/field_schema/CreatedTimeFieldSchema.json @@ -0,0 +1,21 @@ +{ + "type": "createdTime", + "options": { + "result": { + "type": "dateTime", + "options": { + "dateFormat": { + "name": "local", + "format": "l" + }, + "timeFormat": { + "name": "12hour", + "format": "h:mma" + }, + "timeZone": "client" + } + } + }, + "id": "fldnejiVookFfecbE", + "name": "Created" +} diff --git a/tests/sample_data/field_schema/CurrencyFieldSchema.json b/tests/sample_data/field_schema/CurrencyFieldSchema.json new file mode 100644 index 00000000..f80cc0cf --- /dev/null +++ b/tests/sample_data/field_schema/CurrencyFieldSchema.json @@ -0,0 +1,9 @@ +{ + "type": "currency", + "options": { + "precision": 2, + "symbol": "$" + }, + "id": "fldotSLYreNpZ1oD3", + "name": "Dollars" +} diff --git a/tests/sample_data/field_schema/DateFieldSchema.json b/tests/sample_data/field_schema/DateFieldSchema.json new file mode 100644 index 00000000..1ef04774 --- /dev/null +++ b/tests/sample_data/field_schema/DateFieldSchema.json @@ -0,0 +1,11 @@ +{ + "type": "date", + "options": { + "dateFormat": { + "name": "local", + "format": "l" + } + }, + "id": "fldgstMCbpPNgbXBN", + "name": "Date" +} diff --git a/tests/sample_data/field_schema/DateTimeFieldSchema.json b/tests/sample_data/field_schema/DateTimeFieldSchema.json new file mode 100644 index 00000000..ca046956 --- /dev/null +++ b/tests/sample_data/field_schema/DateTimeFieldSchema.json @@ -0,0 +1,16 @@ +{ + "type": "dateTime", + "options": { + "dateFormat": { + "name": "local", + "format": "l" + }, + "timeFormat": { + "name": "12hour", + "format": "h:mma" + }, + "timeZone": "client" + }, + "id": "fldVK2DrQVUZDYfvu", + "name": "DateTime" +} diff --git a/tests/sample_data/field_schema/DurationFieldSchema.json 
b/tests/sample_data/field_schema/DurationFieldSchema.json new file mode 100644 index 00000000..3369abc1 --- /dev/null +++ b/tests/sample_data/field_schema/DurationFieldSchema.json @@ -0,0 +1,8 @@ +{ + "type": "duration", + "options": { + "durationFormat": "h:mm:ss.S" + }, + "id": "fldiHUj34Rtni86oW", + "name": "Duration (h:mm:ss.s)" +} diff --git a/tests/sample_data/field_schema/EmailFieldSchema.json b/tests/sample_data/field_schema/EmailFieldSchema.json new file mode 100644 index 00000000..c394932b --- /dev/null +++ b/tests/sample_data/field_schema/EmailFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "email", + "id": "fldXk4Av7rZVXhjYW", + "name": "Email" +} diff --git a/tests/sample_data/field_schema/ExternalSyncSourceFieldSchema.json b/tests/sample_data/field_schema/ExternalSyncSourceFieldSchema.json new file mode 100644 index 00000000..54b61de4 --- /dev/null +++ b/tests/sample_data/field_schema/ExternalSyncSourceFieldSchema.json @@ -0,0 +1,24 @@ +{ + "type": "externalSyncSource", + "options": { + "choices": [ + { + "id": "selX4cKixuDso5PEX", + "name": "One", + "color": "blueLight2" + }, + { + "id": "seloqP1Gzj8Glrspo", + "name": "Two", + "color": "cyanLight2" + }, + { + "id": "selqPFVai30QE6Kp8", + "name": "Three", + "color": "tealLight2" + } + ] + }, + "id": "fldI5gjLnq0RXUleV", + "name": "Sync Source" +} diff --git a/tests/sample_data/field_schema/FormulaFieldSchema.json b/tests/sample_data/field_schema/FormulaFieldSchema.json new file mode 100644 index 00000000..509c7bba --- /dev/null +++ b/tests/sample_data/field_schema/FormulaFieldSchema.json @@ -0,0 +1,18 @@ +{ + "type": "formula", + "options": { + "formula": "{fldgstMCbpPNgbXBN} + \"FOO\"", + "isValid": true, + "referencedFieldIds": [ + "fldgstMCbpPNgbXBN" + ], + "result": { + "type": "number", + "options": { + "precision": 0 + } + } + }, + "id": "fldBeqw3negrW9MMK", + "name": "Formula NaN" +} diff --git a/tests/sample_data/field_schema/LastModifiedByFieldSchema.json 
b/tests/sample_data/field_schema/LastModifiedByFieldSchema.json new file mode 100644 index 00000000..580d56e3 --- /dev/null +++ b/tests/sample_data/field_schema/LastModifiedByFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "lastModifiedBy", + "id": "fld3oKAJRAQAqHe25", + "name": "Last Modified By" +} diff --git a/tests/sample_data/field_schema/LastModifiedTimeFieldSchema.json b/tests/sample_data/field_schema/LastModifiedTimeFieldSchema.json new file mode 100644 index 00000000..090b608c --- /dev/null +++ b/tests/sample_data/field_schema/LastModifiedTimeFieldSchema.json @@ -0,0 +1,23 @@ +{ + "type": "lastModifiedTime", + "options": { + "isValid": true, + "referencedFieldIds": [], + "result": { + "type": "dateTime", + "options": { + "dateFormat": { + "name": "local", + "format": "l" + }, + "timeFormat": { + "name": "12hour", + "format": "h:mma" + }, + "timeZone": "client" + } + } + }, + "id": "fldoGZqYuPMGHyt51", + "name": "Last Modified" +} diff --git a/tests/sample_data/field_schema/ManualSortFieldSchema.json b/tests/sample_data/field_schema/ManualSortFieldSchema.json new file mode 100644 index 00000000..d99cfa36 --- /dev/null +++ b/tests/sample_data/field_schema/ManualSortFieldSchema.json @@ -0,0 +1,6 @@ +{ + "type": "manualSort", + "options": null, + "id": "fldqCjrs1UhXgHUIc", + "name": "Record Sort" +} diff --git a/tests/sample_data/field_schema/MultilineTextFieldSchema.json b/tests/sample_data/field_schema/MultilineTextFieldSchema.json new file mode 100644 index 00000000..fce38c94 --- /dev/null +++ b/tests/sample_data/field_schema/MultilineTextFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "multilineText", + "id": "fldEHLmq3SvZCvgOT", + "name": "Notes" +} diff --git a/tests/sample_data/field_schema/MultipleAttachmentsFieldSchema.json b/tests/sample_data/field_schema/MultipleAttachmentsFieldSchema.json new file mode 100644 index 00000000..05d321c6 --- /dev/null +++ b/tests/sample_data/field_schema/MultipleAttachmentsFieldSchema.json @@ -0,0 +1,8 @@ +{ + "type": 
"multipleAttachments", + "options": { + "isReversed": false + }, + "id": "fldDACsrCOBiPrlZv", + "name": "Attachments" +} diff --git a/tests/sample_data/field_schema/MultipleCollaboratorsFieldSchema.json b/tests/sample_data/field_schema/MultipleCollaboratorsFieldSchema.json new file mode 100644 index 00000000..b5fefa4b --- /dev/null +++ b/tests/sample_data/field_schema/MultipleCollaboratorsFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "multipleCollaborators", + "id": "fldbDOAG70ADnuF1g", + "name": "Watchers" +} diff --git a/tests/sample_data/field_schema/MultipleLookupValuesFieldSchema.json b/tests/sample_data/field_schema/MultipleLookupValuesFieldSchema.json new file mode 100644 index 00000000..c8982908 --- /dev/null +++ b/tests/sample_data/field_schema/MultipleLookupValuesFieldSchema.json @@ -0,0 +1,32 @@ +{ + "type": "multipleLookupValues", + "options": { + "isValid": true, + "recordLinkFieldId": "fldNvFMYxBnf35WkO", + "fieldIdInLinkedTable": "fldI5gjLnq0RXUleV", + "result": { + "type": "multipleSelects", + "options": { + "choices": [ + { + "id": "selX4cKixuDso5PEX", + "name": "One", + "color": "blueLight2" + }, + { + "id": "seloqP1Gzj8Glrspo", + "name": "Two", + "color": "cyanLight2" + }, + { + "id": "selqPFVai30QE6Kp8", + "name": "Three", + "color": "tealLight2" + } + ] + } + } + }, + "id": "fldwYZ0pPzf0uvyg7", + "name": "Lookup Multi Select" +} diff --git a/tests/sample_data/field_schema/MultipleRecordLinksFieldSchema.json b/tests/sample_data/field_schema/MultipleRecordLinksFieldSchema.json new file mode 100644 index 00000000..fd325c16 --- /dev/null +++ b/tests/sample_data/field_schema/MultipleRecordLinksFieldSchema.json @@ -0,0 +1,10 @@ +{ + "type": "multipleRecordLinks", + "options": { + "linkedTableId": "tblFgcHgyO7LlhgUe", + "isReversed": false, + "prefersSingleRecordLink": false + }, + "id": "fldNvFMYxBnf35WkO", + "name": "Link to Self" +} diff --git a/tests/sample_data/field_schema/MultipleSelectsFieldSchema.json 
b/tests/sample_data/field_schema/MultipleSelectsFieldSchema.json new file mode 100644 index 00000000..ff9e227e --- /dev/null +++ b/tests/sample_data/field_schema/MultipleSelectsFieldSchema.json @@ -0,0 +1,24 @@ +{ + "type": "multipleSelects", + "options": { + "choices": [ + { + "id": "selX4cKixuDso5PEX", + "name": "One", + "color": "blueLight2" + }, + { + "id": "seloqP1Gzj8Glrspo", + "name": "Two", + "color": "cyanLight2" + }, + { + "id": "selqPFVai30QE6Kp8", + "name": "Three", + "color": "tealLight2" + } + ] + }, + "id": "fldI5gjLnq0RXUleV", + "name": "Tags" +} diff --git a/tests/sample_data/field_schema/NumberFieldSchema.json b/tests/sample_data/field_schema/NumberFieldSchema.json new file mode 100644 index 00000000..8d8bf432 --- /dev/null +++ b/tests/sample_data/field_schema/NumberFieldSchema.json @@ -0,0 +1,8 @@ +{ + "type": "number", + "options": { + "precision": 0 + }, + "id": "fldrkqRoHDtIko3wc", + "name": "Integer" +} diff --git a/tests/sample_data/field_schema/PercentFieldSchema.json b/tests/sample_data/field_schema/PercentFieldSchema.json new file mode 100644 index 00000000..0935f448 --- /dev/null +++ b/tests/sample_data/field_schema/PercentFieldSchema.json @@ -0,0 +1,8 @@ +{ + "type": "percent", + "options": { + "precision": 0 + }, + "id": "fldLETyD8dPtNqyIi", + "name": "Percent" +} diff --git a/tests/sample_data/field_schema/PhoneNumberFieldSchema.json b/tests/sample_data/field_schema/PhoneNumberFieldSchema.json new file mode 100644 index 00000000..d7d122be --- /dev/null +++ b/tests/sample_data/field_schema/PhoneNumberFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "phoneNumber", + "id": "fld94Wcs7zIYcWsem", + "name": "Phone" +} diff --git a/tests/sample_data/field_schema/RatingFieldSchema.json b/tests/sample_data/field_schema/RatingFieldSchema.json new file mode 100644 index 00000000..2c58f02a --- /dev/null +++ b/tests/sample_data/field_schema/RatingFieldSchema.json @@ -0,0 +1,10 @@ +{ + "type": "rating", + "options": { + "icon": "star", + "max": 5, + 
"color": "yellowBright" + }, + "id": "fldlvlcSYXyatQSZI", + "name": "Stars" +} diff --git a/tests/sample_data/field_schema/RichTextFieldSchema.json b/tests/sample_data/field_schema/RichTextFieldSchema.json new file mode 100644 index 00000000..c67ca739 --- /dev/null +++ b/tests/sample_data/field_schema/RichTextFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "richText", + "id": "fldEHLmq3SvZCvgOT", + "name": "Notes" +} diff --git a/tests/sample_data/field_schema/RollupFieldSchema.json b/tests/sample_data/field_schema/RollupFieldSchema.json new file mode 100644 index 00000000..8615a02a --- /dev/null +++ b/tests/sample_data/field_schema/RollupFieldSchema.json @@ -0,0 +1,17 @@ +{ + "type": "rollup", + "options": { + "isValid": true, + "recordLinkFieldId": "fldNvFMYxBnf35WkO", + "fieldIdInLinkedTable": "fld4QpwrhRppNhjFM", + "referencedFieldIds": [], + "result": { + "type": "number", + "options": { + "precision": 0 + } + } + }, + "id": "fldSTbXQ3nFW18CG5", + "name": "Rollup Error" +} diff --git a/tests/sample_data/field_schema/SingleCollaboratorFieldSchema.json b/tests/sample_data/field_schema/SingleCollaboratorFieldSchema.json new file mode 100644 index 00000000..d64d3a75 --- /dev/null +++ b/tests/sample_data/field_schema/SingleCollaboratorFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "singleCollaborator", + "id": "fldx2Pz3OV1ikvqFw", + "name": "Assignee" +} diff --git a/tests/sample_data/field_schema/SingleLineTextFieldSchema.json b/tests/sample_data/field_schema/SingleLineTextFieldSchema.json new file mode 100644 index 00000000..0b2a89c4 --- /dev/null +++ b/tests/sample_data/field_schema/SingleLineTextFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "singleLineText", + "id": "fldeica4zrxCGxsl1", + "name": "Name" +} diff --git a/tests/sample_data/field_schema/SingleSelectFieldSchema.json b/tests/sample_data/field_schema/SingleSelectFieldSchema.json new file mode 100644 index 00000000..632709f1 --- /dev/null +++ b/tests/sample_data/field_schema/SingleSelectFieldSchema.json 
@@ -0,0 +1,24 @@ +{ + "type": "singleSelect", + "options": { + "choices": [ + { + "id": "selSwz6Sw0qCkFTll", + "name": "Todo", + "color": "redLight2" + }, + { + "id": "selTYUU62nFD9JX50", + "name": "In progress", + "color": "yellowLight2" + }, + { + "id": "selvoFITCGqwGxYtB", + "name": "Done", + "color": "greenLight2" + } + ] + }, + "id": "fldqCjrs1UhXgHUIc", + "name": "Status" +} diff --git a/tests/sample_data/field_schema/UnknownFieldSchema.json b/tests/sample_data/field_schema/UnknownFieldSchema.json new file mode 100644 index 00000000..460b7b23 --- /dev/null +++ b/tests/sample_data/field_schema/UnknownFieldSchema.json @@ -0,0 +1,9 @@ +{ + "type": "somethingUnrecognizable", + "id": "fld8cfZQtRNP5OXu9", + "name": "Unknown Field Type", + "options": { + "something": "wacky", + "and": ["unusual"] + } +} diff --git a/tests/sample_data/field_schema/UrlFieldSchema.json b/tests/sample_data/field_schema/UrlFieldSchema.json new file mode 100644 index 00000000..84a68631 --- /dev/null +++ b/tests/sample_data/field_schema/UrlFieldSchema.json @@ -0,0 +1,5 @@ +{ + "type": "url", + "id": "fld8cfZQtRNP5OXu9", + "name": "URL" +} diff --git a/tests/test_api_api.py b/tests/test_api_api.py index 9dcb4c17..541804dc 100644 --- a/tests/test_api_api.py +++ b/tests/test_api_api.py @@ -1,4 +1,13 @@ -from pyairtable import Api, Base, Table # noqa +from unittest import mock + +import pytest + +from pyairtable import Api, Base, Table + + +@pytest.fixture +def mock_bases_endpoint(api, requests_mock, sample_json): + return requests_mock.get(api.urls.bases, json=sample_json("Bases")) def test_repr(api): @@ -57,3 +66,117 @@ def test_whoami(api, requests_mock): } requests_mock.get("https://api.airtable.com/v0/meta/whoami", json=payload) assert api.whoami() == payload + + +@pytest.mark.parametrize("base_id", ("appLkNDICXNqxSDhG", "Apartment Hunting")) +def test_base(api, base_id, mock_bases_endpoint): + # test behavior without validation + base = api.base(base_id) + assert base.id == base_id + 
assert base.name is None + assert base.permission_level is None + assert mock_bases_endpoint.call_count == 0 + + # test behavior with validation + base = api.base(base_id, validate=True) + assert base.id == "appLkNDICXNqxSDhG" + assert base.name == "Apartment Hunting" + assert base.permission_level == "create" + assert mock_bases_endpoint.call_count == 1 + + # calling a second time uses cached information... + api.base(base_id, validate=True) + assert mock_bases_endpoint.call_count == 1 + + # ...unless we also pass force=True + base = api.base(base_id, validate=True, force=True) + assert mock_bases_endpoint.call_count == 2 + + +def test_bases(api, mock_bases_endpoint): + base_ids = [base.id for base in api.bases()] + assert mock_bases_endpoint.call_count == 1 + assert base_ids == ["appLkNDICXNqxSDhG", "appSW9R5uCNmRmfl6"] + + # Should not make a second API call... + assert [base.id for base in api.bases()] == base_ids + assert mock_bases_endpoint.call_count == 1 + # ....unless we force it: + reloaded = api.bases(force=True) + assert [base.id for base in reloaded] == base_ids + assert mock_bases_endpoint.call_count == 2 + + +def test_iterate_requests(api: Api, requests_mock): + url = "https://example.com" + response_list = [{"json": {"page": n, "offset": n + 1}} for n in range(1, 3)] + response_list[-1]["json"]["offset"] = None + requests_mock.get(url, response_list=response_list) + responses = list(api.iterate_requests("GET", url)) + assert responses == [response["json"] for response in response_list] + + +def test_iterate_requests__post(api: Api, requests_mock): + url = "https://example.com" + # prepare a few pages of dummy responses + response_list = [ + {"json": {"page": 0, "offset": 1}}, + {"json": {"page": 1, "offset": 2}}, + {"json": {"page": 2}}, + ] + m = requests_mock.post(url, response_list=response_list) + # construct a request that will get converted from GET to POST + formula = "X" * (api.MAX_URL_LENGTH + 1) + pages = list( + api.iterate_requests( + 
"GET", + url=url, + fallback=("POST", url), + options={"formula": formula}, + ) + ) + # ensure we got the responses we expected + assert pages == [ + {"page": 0, "offset": 1}, + {"page": 1, "offset": 2}, + {"page": 2}, + ] + # ensure we made three POST requests + assert m.call_count == 3 + assert len(pages) == 3 + requests = [r.json() for r in m.request_history] + assert requests == [ + {"filterByFormula": formula}, + {"filterByFormula": formula, "offset": "1"}, + {"filterByFormula": formula, "offset": "2"}, + ] + + +def test_iterate_requests__invalid_type(api: Api, requests_mock): + url = "https://example.com" + response_list = [{"json": {"page": n, "offset": n + 1}} for n in range(1, 3)] + response_list.append({"json": "anything but a dict, and we stop immediately"}) + requests_mock.get(url, response_list=response_list) + responses = list(api.iterate_requests("GET", url)) + assert responses == [response["json"] for response in response_list] + + +def test_workspace(api): + assert api.workspace("wspFake").id == "wspFake" + + +def test_enterprise(api, requests_mock, sample_json): + url = api.build_url("meta/enterpriseAccount/entUBq2RGdihxl3vU") + requests_mock.get(url, json=sample_json("EnterpriseInfo")) + enterprise = api.enterprise("entUBq2RGdihxl3vU") + assert enterprise.id == "entUBq2RGdihxl3vU" + + +def test_create_base(api): + """ + Test that Api.create_base is a passthrough to Workspace.create_base + """ + with mock.patch("pyairtable.Workspace.create_base") as m: + api.create_base("wspFake", "Fake Name", []) + + m.assert_called_once_with("Fake Name", []) diff --git a/tests/test_api_base.py b/tests/test_api_base.py index 3dee5131..def883c0 100644 --- a/tests/test_api_base.py +++ b/tests/test_api_base.py @@ -1,11 +1,17 @@ import datetime import pytest +from requests import HTTPError from pyairtable import Base, Table from pyairtable.testing import fake_id +@pytest.fixture +def mock_tables_endpoint(base, requests_mock, sample_json): + return 
requests_mock.get(base.urls.tables, json=sample_json("BaseSchema")) + + def test_constructor(api): base = Base(api, "base_id") assert base.api == api @@ -30,21 +36,96 @@ def test_invalid_constructor(): Base("api_key", "base_id", timeout=(1, 1)) -def test_repr(base): - assert "Base" in base.__repr__() +@pytest.mark.parametrize( + "kwargs,expected", + [ + ( + dict(base_id="appFake"), + "", + ), + ( + dict(base_id="appFake", name="Some name"), + "", + ), + ( + dict(base_id="appFake", permission_level="editor"), + "", + ), + ], +) +def test_repr(api, kwargs, expected): + base = Base(api, **kwargs) + assert repr(base) == expected + + +def test_schema(base: Base, mock_tables_endpoint): + table_schema = base.schema().table("tbltp8DGLhqbUmjK1") + assert table_schema.name == "Apartments" + assert mock_tables_endpoint.call_count == 1 + # Test that we cache the result unless force=True + base.schema() + assert mock_tables_endpoint.call_count == 1 + base.schema(force=True) + assert mock_tables_endpoint.call_count == 2 -def test_get_table(base: Base): + +def test_table(base: Base, requests_mock): + # no network traffic expected; requests_mock will fail if it happens rv = base.table("tablename") assert isinstance(rv, Table) assert rv.base == base assert rv.name == "tablename" - assert rv.url == f"https://api.airtable.com/v0/{base.id}/tablename" + assert rv.urls.records == f"https://api.airtable.com/v0/{base.id}/tablename" + + +def test_table_validate(base: Base, mock_tables_endpoint): + """ + Test that Base.table(..., validate=True) allows us to look up a table + by either ID or name and get the correct properties. + """ + # It will reuse the cached schema when validate=True is called multiple times... 
+ base.table("tbltp8DGLhqbUmjK1", validate=True) + base.table("Apartments", validate=True) + assert mock_tables_endpoint.call_count == 1 + # ...unless we also pass force=True + base.table("Apartments", validate=True, force=True) + assert mock_tables_endpoint.call_count == 2 + + +def test_table__invalid(base, mock_tables_endpoint): + # validate=True will raise an exception if called with an invalid ID/name: + with pytest.raises(KeyError): + base.table("DoesNotExist", validate=True) + + +def test_tables(base: Base, mock_tables_endpoint): + """ + Test that Base.tables() returns a list of validated Base instances. + """ + result = base.tables() + assert len(result) == 2 + assert result[0].name == "Apartments" + assert result[1].name == "Districts" + + +def test_collaborators(base: Base, requests_mock, sample_json): + requests_mock.get(base.urls.meta, json=sample_json("BaseCollaborators")) + result = base.collaborators() + assert result.individual_collaborators.via_base[0].email == "foo@bam.com" + assert result.group_collaborators.via_workspace[0].group_id == "ugp1mKGb3KXUyQfOZ" + + +def test_shares(base: Base, requests_mock, sample_json): + requests_mock.get(base.urls.shares, json=sample_json("BaseShares")) + result = base.shares() + assert result[0].state == "enabled" + assert result[1].effective_email_domain_allow_list == [] def test_webhooks(base: Base, requests_mock, sample_json): m = requests_mock.get( - base.webhooks_url, + base.urls.webhooks, json={"webhooks": [sample_json("Webhook")]}, ) webhooks = base.webhooks() @@ -54,6 +135,15 @@ def test_webhooks(base: Base, requests_mock, sample_json): assert webhooks[0].last_notification_result.error +def test_webhook(base: Base, requests_mock, sample_json): + requests_mock.get(base.urls.webhooks, json={"webhooks": [sample_json("Webhook")]}) + webhook = base.webhook("ach00000000000001") + assert webhook.id == "ach00000000000001" + assert webhook.notification_url == "https://example.com/receive-ping" + with 
pytest.raises(KeyError): + base.webhook("DoesNotExist") + + def test_add_webhook(base: Base, requests_mock): def _callback(request, context): expires = datetime.datetime.now() + datetime.timedelta(days=7) @@ -77,7 +167,7 @@ def _callback(request, context): } } } - m = requests_mock.post(base.webhooks_url, json=_callback) + m = requests_mock.post(base.urls.webhooks, json=_callback) result = base.add_webhook("https://example.com/cb", spec) assert m.call_count == 1 @@ -85,3 +175,124 @@ def _callback(request, context): assert m.last_request.json()["specification"] == spec assert result.id.startswith("ach") assert result.mac_secret_base64 == "secret" + + +def test_name(api, base, requests_mock): + """ + Test that Base().name is only set if passed explicitly to the constructor, + or if it is available in cached schema information. + """ + requests_mock.get( + base.urls.meta, + json={ + "id": base.id, + "createdTime": "2021-01-01T00:00:00.000Z", + "name": "Mocked Base Name", + "permissionLevel": "create", + "workspaceId": "wspFake", + }, + ) + + assert api.base(base.id).name is None + assert base.name is None + assert base.collaborators().name == "Mocked Base Name" + assert base.name == "Mocked Base Name" + + # Test behavior with older constructor pattern + with pytest.warns(DeprecationWarning): + assert Base("tok", "app").name is None + with pytest.warns(DeprecationWarning): + assert Base("tok", "app", name="Base Name").name == "Base Name" + + +def test_create_table(base, requests_mock, mock_tables_endpoint): + """ + Test that Base.create_table() makes two calls, one to create the table, + and another to re-retrieve the entire base's schema. 
+ """ + m = requests_mock.post(mock_tables_endpoint._url, json={"id": "tblWasJustCreated"}) + mock_tables_endpoint._responses[0]._params["json"]["tables"].append( + { + "id": "tblWasJustCreated", + "name": "Table Name", + "primaryFieldId": "fldWasJustCreated", + "fields": [ + { + "id": "fldWasJustCreated", + "name": "Whatever", + "type": "singleLineText", + } + ], + "views": [], + } + ) + table = base.create_table( + "Table Name", + fields=[{"name": "Whatever"}], + description="Description", + ) + assert m.call_count == 1 + assert m.request_history[-1].json() == { + "name": "Table Name", + "description": "Description", + "fields": [{"name": "Whatever"}], + } + + assert isinstance(table, Table) + assert table.id == "tblWasJustCreated" + assert table.name == "Table Name" + assert table.schema().primary_field_id == "fldWasJustCreated" + + +def test_schema_refresh(base, requests_mock, mock_tables_endpoint): + """ + Test that base.schema() is forced to refresh on table create. + """ + + schema_id = id(base.schema()) + + m = requests_mock.post(mock_tables_endpoint._url, json={"id": "tblWasJustCreated"}) + mock_tables_endpoint._responses[0]._params["json"]["tables"].append( + { + "id": "tblWasJustCreated", + "name": "Table Name", + "primaryFieldId": "fldWasJustCreated", + "fields": [ + { + "id": "fldWasJustCreated", + "name": "Whatever", + "type": "singleLineText", + } + ], + "views": [], + } + ) + + table = base.create_table( + "Table Name", + fields=[{"name": "Whatever"}], + description="Description", + ) + + assert m.call_count == 1 + assert table.id == "tblWasJustCreated" + assert not id(base.schema()) == schema_id + + +def test_delete(base, requests_mock): + """ + Test that Base.delete() hits the right endpoint. 
+ """ + m = requests_mock.delete(base.urls.meta, json={"id": base.id, "deleted": True}) + base.delete() + assert m.call_count == 1 + + +def test_delete__enterprise_only_table(api, base, requests_mock): + """ + Test that Base.delete() explains why it might be getting a 404. + """ + requests_mock.delete(base.urls.meta, status_code=404) + with pytest.raises(HTTPError) as excinfo: + base.delete() + assert "Base.delete() requires an enterprise billing plan" in str(excinfo) diff --git a/tests/test_api_enterprise.py b/tests/test_api_enterprise.py new file mode 100644 index 00000000..2c577ae1 --- /dev/null +++ b/tests/test_api_enterprise.py @@ -0,0 +1,496 @@ +from datetime import datetime, timezone +from unittest.mock import Mock, call, patch + +import pytest + +from pyairtable.api.enterprise import ( + DeleteUsersResponse, + Enterprise, + ManageUsersResponse, +) +from pyairtable.models.schema import EnterpriseInfo, UserGroup, UserInfo +from pyairtable.testing import fake_id + +N_AUDIT_PAGES = 15 +N_AUDIT_PAGE_SIZE = 10 + + +@pytest.fixture(autouse=True) +def enterprise_mocks(enterprise, requests_mock, sample_json): + m = Mock() + m.json_user = sample_json("UserInfo") + m.json_users = {"users": [m.json_user]} + m.json_group = sample_json("UserGroup") + m.json_enterprise = sample_json("EnterpriseInfo") + m.user_id = m.json_user["id"] + m.group_id = m.json_group["id"] + m.get_info = requests_mock.get( + enterprise_url := "https://api.airtable.com/v0/meta/enterpriseAccounts/entUBq2RGdihxl3vU", + json=m.json_enterprise, + ) + m.get_user = requests_mock.get( + f"{enterprise_url}/users/usrL2PNC5o3H4lBEi", + json=m.json_user, + ) + m.get_users = requests_mock.get( + f"{enterprise_url}/users", + json=m.json_users, + ) + m.get_group = requests_mock.get( + "https://api.airtable.com/v0/meta/groups/ugp1mKGb3KXUyQfOZ", + json=m.json_group, + ) + m.get_audit_log = requests_mock.get( + f"{enterprise_url}/auditLogEvents", + response_list=[ + { + "json": { + "events": 
fake_audit_log_events(n), + "pagination": ( + None if n == N_AUDIT_PAGES - 1 else {"previous": "dummy"} + ), + } + } + for n in range(N_AUDIT_PAGES) + ], + ) + m.remove_user = requests_mock.post( + f"{enterprise_url}/users/{m.user_id}/remove", + json=sample_json("UserRemoved"), + ) + m.claim_users = requests_mock.post( + f"{enterprise_url}/claim/users", + json={"errors": []}, + ) + m.create_descendants = requests_mock.post( + f"{enterprise_url}/descendants", json={"id": fake_id("ent")} + ) + m.move_groups_json = {} + m.move_groups = requests_mock.post( + f"{enterprise_url}/moveGroups", json=m.move_groups_json + ) + m.move_workspaces_json = {} + m.move_workspaces = requests_mock.post( + f"{enterprise_url}/moveWorkspaces", json=m.move_workspaces_json + ) + return m + + +def fake_audit_log_events(counter, page_size=N_AUDIT_PAGE_SIZE): + return [ + { + "id": str(counter * 1000 + n), + "timestamp": datetime.now().isoformat(), + "action": "viewBase", + "actor": {"type": "anonymousUser"}, + "modelId": (base_id := fake_id("app")), + "modelType": "base", + "payload": {"name": "The Base Name"}, + "payloadVersion": "1.0", + "context": { + "baseId": base_id, + "actionId": fake_id("act"), + "enterpriseAccountId": fake_id("ent"), + "workspaceId": fake_id("wsp"), + }, + "origin": {"ipAddress": "8.8.8.8", "userAgent": "Internet Explorer"}, + } + for n in range(page_size) + ] + + +def test_info(enterprise, enterprise_mocks): + assert isinstance(info := enterprise.info(), EnterpriseInfo) + assert info.id == "entUBq2RGdihxl3vU" + assert info.workspace_ids == ["wspmhESAta6clCCwF", "wspHvvm4dAktsStZH"] + assert info.email_domains[0].is_sso_required is True + assert enterprise_mocks.get_info.call_count == 1 + + assert enterprise.info(force=True).id == "entUBq2RGdihxl3vU" + assert enterprise_mocks.get_info.call_count == 2 + assert "aggregated" not in enterprise_mocks.get_info.last_request.qs + assert "descendants" not in enterprise_mocks.get_info.last_request.qs + + +def 
test_info__aggregated_descendants(enterprise, enterprise_mocks): + enterprise_mocks.json_enterprise["aggregated"] = { + "groupIds": ["ugp1mKGb3KXUyQfOZ"], + "userIds": ["usrL2PNC5o3H4lBEi"], + "workspaceIds": ["wspmhESAta6clCCwF"], + } + enterprise_mocks.json_enterprise["descendants"] = { + (sub_ent_id := fake_id("ent")): { + "groupIds": ["ugp1mKGb3KXUyDESC"], + "userIds": ["usrL2PNC5o3H4DESC"], + "workspaceIds": ["wspmhESAta6clDESC"], + } + } + info = enterprise.info(aggregated=True, descendants=True) + assert enterprise_mocks.get_info.call_count == 1 + assert enterprise_mocks.get_info.last_request.qs["include"] == [ + "aggregated", + "descendants", + ] + assert info.aggregated.group_ids == ["ugp1mKGb3KXUyQfOZ"] + assert info.aggregated.user_ids == ["usrL2PNC5o3H4lBEi"] + assert info.aggregated.workspace_ids == ["wspmhESAta6clCCwF"] + assert info.descendants[sub_ent_id].group_ids == ["ugp1mKGb3KXUyDESC"] + assert info.descendants[sub_ent_id].user_ids == ["usrL2PNC5o3H4DESC"] + assert info.descendants[sub_ent_id].workspace_ids == ["wspmhESAta6clDESC"] + + +def test_user(enterprise, enterprise_mocks): + user = enterprise.user(enterprise_mocks.user_id) + assert isinstance(user, UserInfo) + assert enterprise_mocks.get_users.call_count == 1 + assert user.collaborations + assert "appLkNDICXNqxSDhG" in user.collaborations.bases + assert "pbdyGA3PsOziEHPDE" in user.collaborations.interfaces + assert "wspmhESAta6clCCwF" in user.collaborations.workspaces + + +def test_user__no_collaboration(enterprise, enterprise_mocks): + del enterprise_mocks.json_users["users"][0]["collaborations"] + + user = enterprise.user(enterprise_mocks.user_id, collaborations=False) + assert isinstance(user, UserInfo) + assert enterprise_mocks.get_users.call_count == 1 + assert not enterprise_mocks.get_users.last_request.qs.get("include") + assert not user.collaborations # test for Collaborations.__bool__ + assert not user.collaborations.bases + assert not user.collaborations.interfaces + assert not 
user.collaborations.workspaces + + +def test_user__descendants(enterprise, enterprise_mocks): + enterprise_mocks.json_users["users"][0]["descendants"] = { + (other_ent_id := fake_id("ent")): { + "lastActivityTime": "2021-01-01T12:34:56Z", + "isAdmin": True, + "groups": [{"id": (fake_group_id := fake_id("ugp"))}], + } + } + user = enterprise.user(enterprise_mocks.user_id, descendants=True) + d = user.descendants[other_ent_id] + assert d.last_activity_time == datetime(2021, 1, 1, 12, 34, 56, tzinfo=timezone.utc) + assert d.is_admin is True + assert d.groups[0].id == fake_group_id + + +def test_user__aggregates(enterprise, enterprise_mocks): + enterprise_mocks.json_users["users"][0]["aggregated"] = { + "lastActivityTime": "2021-01-01T12:34:56Z", + "isAdmin": True, + "groups": [{"id": (fake_group_id := fake_id("ugp"))}], + } + user = enterprise.user(enterprise_mocks.user_id, aggregated=True) + a = user.aggregated + assert a.last_activity_time == datetime(2021, 1, 1, 12, 34, 56, tzinfo=timezone.utc) + assert a.is_admin is True + assert a.groups[0].id == fake_group_id + + +@pytest.mark.parametrize( + "search_for", + ( + ["usrL2PNC5o3H4lBEi"], + ["foo@bar.com"], + ["usrL2PNC5o3H4lBEi", "foo@bar.com"], # should not return duplicates + ), +) +def test_users(enterprise, search_for): + results = enterprise.users(search_for) + assert len(results) == 1 + assert isinstance(user := results[0], UserInfo) + assert user.id == "usrL2PNC5o3H4lBEi" + assert user.state == "provisioned" + + +def test_users__descendants(enterprise, enterprise_mocks): + enterprise_mocks.json_users["users"][0]["descendants"] = { + (other_ent_id := fake_id("ent")): { + "lastActivityTime": "2021-01-01T12:34:56Z", + "isAdmin": True, + "groups": [{"id": (fake_group_id := fake_id("ugp"))}], + } + } + users = enterprise.users([enterprise_mocks.user_id], descendants=True) + assert len(users) == 1 + d = users[0].descendants[other_ent_id] + assert d.last_activity_time == datetime(2021, 1, 1, 12, 34, 56, 
tzinfo=timezone.utc) + assert d.is_admin is True + assert d.groups[0].id == fake_group_id + + +def test_users__aggregates(enterprise, enterprise_mocks): + enterprise_mocks.json_users["users"][0]["aggregated"] = { + "lastActivityTime": "2021-01-01T12:34:56Z", + "isAdmin": True, + "groups": [{"id": (fake_group_id := fake_id("ugp"))}], + } + users = enterprise.users([enterprise_mocks.user_id], aggregated=True) + assert len(users) == 1 + a = users[0].aggregated + assert a.last_activity_time == datetime(2021, 1, 1, 12, 34, 56, tzinfo=timezone.utc) + assert a.is_admin is True + assert a.groups[0].id == fake_group_id + + +def test_group(enterprise, enterprise_mocks): + grp = enterprise.group("ugp1mKGb3KXUyQfOZ") + assert enterprise_mocks.get_group.call_count == 1 + assert isinstance(grp, UserGroup) + assert grp.id == "ugp1mKGb3KXUyQfOZ" + assert grp.name == "Group name" + assert grp.members[0].email == "foo@bar.com" + assert grp.collaborations + assert "appLkNDICXNqxSDhG" in grp.collaborations.bases + assert "pbdyGA3PsOziEHPDE" in grp.collaborations.interfaces + assert "wspmhESAta6clCCwF" in grp.collaborations.workspaces + + +def test_group__no_collaboration(enterprise, enterprise_mocks): + del enterprise_mocks.json_group["collaborations"] + + grp = enterprise.group(enterprise_mocks.group_id, collaborations=False) + assert isinstance(grp, UserGroup) + assert enterprise_mocks.get_group.call_count == 1 + assert not enterprise_mocks.get_group.last_request.qs.get("include") + assert not grp.collaborations # test for Collaborations.__bool__ + assert not grp.collaborations.bases + assert not grp.collaborations.interfaces + assert not grp.collaborations.workspaces + + +@pytest.mark.parametrize( + "fncall,expected_size", + [ + (call(), N_AUDIT_PAGES * N_AUDIT_PAGE_SIZE), + (call(page_limit=1), N_AUDIT_PAGE_SIZE), + ], +) +def test_audit_log(enterprise, fncall, expected_size): + """ + Test that we iterate through multiple pages of the audit log. 
correctly + """ + events = [ + event + for page in enterprise.audit_log(*fncall.args, **fncall.kwargs) + for event in page.events + ] + assert len(events) == expected_size + + +def test_audit_log__no_loop(enterprise, requests_mock): + """ + Test that an empty page of events does not cause an infinite loop. + """ + requests_mock.get( + enterprise.api.build_url( + f"meta/enterpriseAccounts/{enterprise.id}/auditLogEvents" + ), + json={ + "events": [], + "pagination": {"previous": "dummy"}, + }, + ) + events = [event for page in enterprise.audit_log() for event in page.events] + assert len(events) == 0 + + +@pytest.mark.parametrize( + "fncall,sortorder,offset_field", + [ + (call(), "descending", "previous"), + (call(sort_asc=True), "ascending", "next"), + ], +) +def test_audit_log__sortorder( + api, + enterprise, + enterprise_mocks, + fncall, + sortorder, + offset_field, +): + """ + Test that we calculate sortorder and offset_field correctly + dpeending on whether we're ascending or descending. 
+ """ + with patch.object(api, "iterate_requests", wraps=api.iterate_requests) as m: + list(enterprise.audit_log(*fncall.args, **fncall.kwargs)) + + request = enterprise_mocks.get_audit_log.last_request + assert request.qs["sortOrder"] == [sortorder] + assert m.mock_calls[-1].kwargs["offset_field"] == offset_field + + +@pytest.mark.parametrize( + "kwargs,expected", + [ + ( + {}, + {"isDryRun": False}, + ), + ( + {"replacement": "otherUser"}, + {"isDryRun": False, "replacementOwnerId": "otherUser"}, + ), + ( + {"descendants": True}, + {"isDryRun": False, "removeFromDescendants": True}, + ), + ], +) +def test_remove_user(enterprise, enterprise_mocks, kwargs, expected): + removed = enterprise.remove_user(enterprise_mocks.user_id, **kwargs) + assert enterprise_mocks.remove_user.call_count == 1 + assert enterprise_mocks.remove_user.last_request.json() == expected + assert removed.shared.workspaces[0].user_id == "usrL2PNC5o3H4lBEi" + + +@pytest.fixture +def user_info(enterprise, enterprise_mocks): + user_info = enterprise.user(enterprise_mocks.user_id) + assert user_info._url == f"{enterprise.urls.users}/{user_info.id}" + return user_info + + +def test_delete_user(user_info, requests_mock): + m = requests_mock.delete(user_info._url) + user_info.delete() + assert m.call_count == 1 + + +def test_manage_user(user_info, requests_mock): + m = requests_mock.patch(user_info._url) + user_info.save() + assert m.call_count == 1 + assert m.last_request.json() == {"email": "foo@bar.com", "state": "provisioned"} + + +def test_logout_user(user_info, requests_mock): + m = requests_mock.post(user_info._url + "/logout") + user_info.logout() + assert m.call_count == 1 + assert m.last_request.body is None + + +def test_claim_users(enterprise, enterprise_mocks): + result = enterprise.claim_users( + { + "usrFakeUserId": "managed", + "someone@example.com": "unmanaged", + } + ) + assert isinstance(result, ManageUsersResponse) + assert enterprise_mocks.claim_users.call_count == 1 + assert 
enterprise_mocks.claim_users.last_request.json() == { + "users": [ + {"id": "usrFakeUserId", "state": "managed"}, + {"email": "someone@example.com", "state": "unmanaged"}, + ] + } + + +def test_delete_users(enterprise, requests_mock): + response = { + "deletedUsers": [{"email": "foo@bar.com", "id": "usrL2PNC5o3H4lBEi"}], + "errors": [ + { + "email": "bar@bam.com", + "message": "Invalid permissions", + "type": "INVALID_PERMISSIONS", + } + ], + } + emails = [f"foo{n}@bar.com" for n in range(5)] + m = requests_mock.delete(enterprise.urls.users, json=response) + parsed = enterprise.delete_users(emails) + assert m.call_count == 1 + assert m.last_request.qs == {"email": emails} + assert isinstance(parsed, DeleteUsersResponse) + assert parsed.deleted_users[0].email == "foo@bar.com" + assert parsed.errors[0].type == "INVALID_PERMISSIONS" + + +@pytest.mark.parametrize("action", ["grant", "revoke"]) +def test_manage_admin_access(enterprise, enterprise_mocks, requests_mock, action): + user = enterprise.user(enterprise_mocks.user_id) + m = requests_mock.post(enterprise.urls.admin_access(action), json={}) + method = getattr(enterprise, f"{action}_admin") + result = method( + fake_user_id := fake_id("usr"), + fake_email := "fake@example.com", + user, + ) + assert isinstance(result, ManageUsersResponse) + assert m.call_count == 1 + assert m.last_request.json() == { + "users": [ + {"id": fake_user_id}, + {"email": fake_email}, + {"id": user.id}, + ] + } + + +def test_create_descendant(enterprise, enterprise_mocks): + descendant = enterprise.create_descendant("Some name") + assert enterprise_mocks.create_descendants.call_count == 1 + assert enterprise_mocks.create_descendants.last_request.json() == { + "name": "Some name" + } + assert isinstance(descendant, Enterprise) + + +def test_create_workspace(enterprise, requests_mock): + from pyairtable.api.workspace import Workspace + + workspace_id = fake_id("wsp") + m = requests_mock.post( + "https://api.airtable.com/v0/meta/workspaces", 
+ json={"id": workspace_id}, + ) + result = enterprise.create_workspace("My New Workspace") + assert m.call_count == 1 + assert m.last_request.json() == { + "enterpriseAccountId": enterprise.id, + "name": "My New Workspace", + } + assert isinstance(result, Workspace) + assert result.id == workspace_id + + +def test_move_groups(api, enterprise, enterprise_mocks): + other_id = fake_id("ent") + group_ids = [fake_id("ugp") for _ in range(3)] + enterprise_mocks.move_groups_json["movedGroups"] = [ + {"id": group_id} for group_id in group_ids + ] + for target in [other_id, api.enterprise(other_id)]: + enterprise_mocks.move_groups.reset() + result = enterprise.move_groups(group_ids, target) + assert enterprise_mocks.move_groups.call_count == 1 + assert enterprise_mocks.move_groups.last_request.json() == { + "targetEnterpriseAccountId": other_id, + "groupIds": group_ids, + } + assert set(m.id for m in result.moved_groups) == set(group_ids) + + +def test_move_workspaces(api, enterprise, enterprise_mocks): + other_id = fake_id("ent") + workspace_ids = [fake_id("wsp") for _ in range(3)] + enterprise_mocks.move_workspaces_json["movedWorkspaces"] = [ + {"id": workspace_id} for workspace_id in workspace_ids + ] + for target in [other_id, api.enterprise(other_id)]: + enterprise_mocks.move_workspaces.reset() + result = enterprise.move_workspaces(workspace_ids, target) + assert enterprise_mocks.move_workspaces.call_count == 1 + assert enterprise_mocks.move_workspaces.last_request.json() == { + "targetEnterpriseAccountId": other_id, + "workspaceIds": workspace_ids, + } + assert set(m.id for m in result.moved_workspaces) == set(workspace_ids) diff --git a/tests/test_api_retrying.py b/tests/test_api_retrying.py index 46e6ad5d..e2122b6b 100644 --- a/tests/test_api_retrying.py +++ b/tests/test_api_retrying.py @@ -3,6 +3,7 @@ Instead we use a real HTTP server running in a separate thread, which we can program to respond with various HTTP status codes. 
""" + import json import threading import time @@ -162,6 +163,30 @@ def _table_with_retry(retry_strategy): return _table_with_retry +def test_without_retry_strategy__succeed( + table_with_retry_strategy, + mock_endpoint, + mock_response_single, +): + mock_endpoint.canned_responses = [(200, mock_response_single)] + table = table_with_retry_strategy(None) + assert table.get("record") == mock_response_single + + +def test_without_retry_strategy__fail( + table_with_retry_strategy, + mock_endpoint, + mock_response_single, +): + mock_endpoint.canned_responses = [ + (429, None), + (200, mock_response_single), + ] + table = table_with_retry_strategy(None) + with pytest.raises(requests.exceptions.HTTPError): + table.get("record") + + def test_retry_exceed(table_with_retry_strategy, mock_endpoint): """ Test that we raise a RetryError if we get too many retryable error codes. diff --git a/tests/test_api_table.py b/tests/test_api_table.py index 87888e4d..a2559dc9 100644 --- a/tests/test_api_table.py +++ b/tests/test_api_table.py @@ -1,13 +1,33 @@ -from posixpath import join as urljoin +from datetime import datetime, timezone +from unittest import mock import pytest from requests import Request from requests_mock import Mocker from pyairtable import Api, Base, Table -from pyairtable.testing import fake_record +from pyairtable.formulas import AND, EQ, Field +from pyairtable.models.schema import TableSchema +from pyairtable.testing import fake_attachment, fake_id, fake_record from pyairtable.utils import chunked +NOW = datetime.now(timezone.utc).isoformat() + + +@pytest.fixture() +def table_schema(sample_json, api, base) -> TableSchema: + return TableSchema.model_validate(sample_json("TableSchema")) + + +@pytest.fixture +def mock_table_schema(table, requests_mock, sample_json): + table_schema = sample_json("TableSchema") + table_schema["id"] = table.name = fake_id("tbl") + return requests_mock.get( + table.base.urls.tables + "?include=visibleFieldIds", + json={"tables": 
[table_schema]}, + ) + def test_constructor(base: Base): """ @@ -19,6 +39,20 @@ def test_constructor(base: Base): assert table.name == "table_name" +def test_constructor_with_schema(base: Base, table_schema: TableSchema): + table = Table(None, base, table_schema) + assert table.api == base.api + assert table.base == base + assert table.name == table_schema.name + assert ( + table.urls.records == f"https://api.airtable.com/v0/{base.id}/{table_schema.id}" + ) + assert ( + repr(table) + == f"
" + ) + + def test_deprecated_constructor(api: Api, base: Base): """ Test that "legacy" constructor (passing strings instead of instances) @@ -41,6 +75,7 @@ def test_invalid_constructor(api, base): [api, "base_id", "table_name"], ["api_key", base, "table_name"], [api, base, "table_name"], + [None, base, -1], ]: kwargs = args.pop() if isinstance(args[-1], dict) else {} with pytest.raises(TypeError): @@ -49,7 +84,34 @@ def test_invalid_constructor(api, base): def test_repr(table: Table): - assert repr(table) == "
" + assert repr(table) == "
" + + +def test_schema(base, requests_mock, sample_json): + """ + Test that we can load schema from API. + """ + table = base.table("Apartments") + m = requests_mock.get(base.urls.tables, json=sample_json("BaseSchema")) + assert isinstance(schema := table.schema(), TableSchema) + assert m.call_count == 1 + assert schema.id == "tbltp8DGLhqbUmjK1" + + +def test_id(base, requests_mock, sample_json): + """ + Test that we load schema from API if we need the ID and don't have it, + but if we get a name that *looks* like an ID, we trust it. + """ + m = requests_mock.get(base.urls.tables, json=sample_json("BaseSchema")) + + table = base.table("tbltp8DGLhqbUmjK1") + assert table.id == "tbltp8DGLhqbUmjK1" + assert m.call_count == 0 + + table = base.table("Apartments") + assert table.id == "tbltp8DGLhqbUmjK1" + assert m.call_count == 1 @pytest.mark.parametrize( @@ -62,7 +124,7 @@ def test_repr(table: Table): ) def test_url(api: Api, base_id, table_name, table_url_suffix): table = api.table(base_id, table_name) - assert table.url == f"https://api.airtable.com/v0/{table_url_suffix}" + assert table.urls.records == f"https://api.airtable.com/v0/{table_url_suffix}" def test_chunk(table: Table): @@ -73,11 +135,6 @@ def test_chunk(table: Table): assert chunks[3] == [3] -def test_record_url(table: Table): - rv = table.record_url("xxx") - assert rv == urljoin(table.url, "xxx") - - def test_api_key(table: Table, mock_response_single): def match_auth_header(request): expected_auth_header = "Bearer {}".format(table.api.api_key) @@ -88,7 +145,7 @@ def match_auth_header(request): with Mocker() as m: m.get( - table.record_url("rec"), + table.urls.record("rec"), status_code=200, json=mock_response_single, additional_matcher=match_auth_header, @@ -100,7 +157,7 @@ def match_auth_header(request): def test_get(table: Table, mock_response_single): _id = mock_response_single["id"] with Mocker() as mock: - mock.get(table.record_url(_id), status_code=200, json=mock_response_single) + 
mock.get(table.urls.record(_id), status_code=200, json=mock_response_single) resp = table.get(_id) assert dict_equals(resp, mock_response_single) @@ -108,7 +165,7 @@ def test_get(table: Table, mock_response_single): def test_first(table: Table, mock_response_single): mock_response = {"records": [mock_response_single]} with Mocker() as mock: - url = Request("get", table.url, params={"maxRecords": 1}).prepare().url + url = Request("get", table.urls.records, params={"maxRecords": 1}).prepare().url mock.get( url, status_code=200, @@ -122,7 +179,7 @@ def test_first(table: Table, mock_response_single): def test_first_via_post(table: Table, mock_response_single): mock_response = {"records": [mock_response_single]} with Mocker() as mock: - url = table.url + "/listRecords" + url = table.urls.records_post formula = f"RECORD_ID() != '{'x' * 17000}'" mock_endpoint = mock.post(url, status_code=200, json=mock_response) rv = table.first(formula=formula) @@ -138,7 +195,7 @@ def test_first_via_post(table: Table, mock_response_single): def test_first_none(table: Table, mock_response_single): mock_response = {"records": []} with Mocker() as mock: - url = Request("get", table.url, params={"maxRecords": 1}).prepare().url + url = Request("get", table.urls.records, params={"maxRecords": 1}).prepare().url mock.get( url, status_code=200, @@ -148,35 +205,67 @@ def test_first_none(table: Table, mock_response_single): assert rv is None -def test_all(table: Table, mock_response_list, mock_records): - with Mocker() as mock: - mock.get( - table.url, +def test_all(table, requests_mock, mock_response_list, mock_records): + requests_mock.get( + table.urls.records, + status_code=200, + json=mock_response_list[0], + complete_qs=True, + ) + for n, resp in enumerate(mock_response_list, 1): + offset = resp.get("offset", None) + if not offset: + continue + requests_mock.get( + table.urls.records.add_qs(offset=offset), status_code=200, - json=mock_response_list[0], + json=mock_response_list[1], 
complete_qs=True, ) - for n, resp in enumerate(mock_response_list, 1): - offset = resp.get("offset", None) - if not offset: - continue - offset_url = table.url + "?offset={}".format(offset) - mock.get( - offset_url, - status_code=200, - json=mock_response_list[1], - complete_qs=True, - ) - response = table.all() + + response = table.all() for n, resp in enumerate(response): assert dict_equals(resp, mock_records[n]) +@pytest.mark.parametrize( + "kwargs,expected", + [ + ({"view": "Grid view"}, {"view": ["Grid view"]}), + ({"page_size": 10}, {"pageSize": ["10"]}), + ({"max_records": 10}, {"maxRecords": ["10"]}), + ({"fields": ["Name", "Email"]}, {"fields[]": ["Name", "Email"]}), + ({"formula": "{Status}='Active'"}, {"filterByFormula": ["{Status}='Active'"]}), + ({"cell_format": "json"}, {"cellFormat": ["json"]}), + ({"user_locale": "en_US"}, {"userLocale": ["en_US"]}), + ({"time_zone": "America/New_York"}, {"timeZone": ["America/New_York"]}), + ({"use_field_ids": True}, {"returnFieldsByFieldId": ["1"]}), + ( + {"sort": ["Name", "-Email"]}, + { + "sort[0][direction]": ["asc"], + "sort[0][field]": ["Name"], + "sort[1][direction]": ["desc"], + "sort[1][field]": ["Email"], + }, + ), + ({"count_comments": True}, {"recordMetadata[]": ["commentCount"]}), + ], +) +def test_all__params(table, requests_mock, kwargs, expected): + """ + Test that parameters to all() get translated to query string correctly. 
+ """ + m = requests_mock.get(table.urls.records, status_code=200, json={"records": []}) + table.all(**kwargs) + assert m.last_request.qs == expected + + def test_iterate(table: Table, mock_response_list, mock_records): with Mocker() as mock: mock.get( - table.url, + table.urls.records, status_code=200, json=mock_response_list[0], complete_qs=True, @@ -186,7 +275,7 @@ def test_iterate(table: Table, mock_response_list, mock_records): if not offset: continue params = {"offset": offset} - offset_url = Request("get", table.url, params=params).prepare().url + offset_url = Request("get", table.urls.records, params=params).prepare().url mock.get( offset_url, status_code=200, @@ -202,11 +291,60 @@ def test_iterate(table: Table, mock_response_list, mock_records): assert seq_equals(pages[n], response["records"]) +def test_iterate__formula_conversion(table): + """ + Test that .iterate() will convert a Formula to a str. + """ + with mock.patch("pyairtable.Api.iterate_requests") as m: + table.all(formula=AND(EQ(Field("Name"), "Alice"))) + + m.assert_called_once_with( + method="get", + url=table.urls.records, + fallback=mock.ANY, + options={ + "formula": "AND({Name}='Alice')", + }, + ) + + +def test_all__count_comments(table, requests_mock): + """ + Test that count_comments parameter properly includes commentCount. 
+ """ + mock_response = { + "records": [ + { + "id": "recA", + "createdTime": "2023-01-01T00:00:00.000Z", + "fields": {"Name": "Alice"}, + "commentCount": 5, + }, + { + "id": "recB", + "createdTime": "2023-01-02T00:00:00.000Z", + "fields": {"Name": "Bob"}, + "commentCount": 0, + }, + ] + } + m = requests_mock.get(table.urls.records, status_code=200, json=mock_response) + records = table.all(count_comments=True) + + # Verify the request was made with the correct parameter + assert m.last_request.qs == {"recordMetadata[]": ["commentCount"]} + + # Verify the response includes commentCount + assert len(records) == 2 + assert records[0]["commentCount"] == 5 + assert records[1]["commentCount"] == 0 + + def test_create(table: Table, mock_response_single): with Mocker() as mock: post_data = mock_response_single["fields"] mock.post( - table.url, + table.urls.records, status_code=201, json=mock_response_single, additional_matcher=match_request_data(post_data), @@ -215,16 +353,17 @@ def test_create(table: Table, mock_response_single): assert dict_equals(resp, mock_response_single) -def test_batch_create(table: Table, mock_records): +@pytest.mark.parametrize("container", [list, tuple, iter]) +def test_batch_create(table: Table, container, mock_records): with Mocker() as mock: for chunk in _chunk(mock_records, 10): mock.post( - table.url, + table.urls.records, status_code=201, json={"records": chunk}, ) records = [i["fields"] for i in mock_records] - resp = table.batch_create(records) + resp = table.batch_create(container(records)) assert seq_equals(resp, mock_records) @@ -235,7 +374,7 @@ def test_update(table: Table, mock_response_single, replace, http_method): with Mocker() as mock: mock.register_uri( http_method, - urljoin(table.url, id_), + table.urls.record(id_), status_code=201, json=mock_response_single, additional_matcher=match_request_data(post_data), @@ -244,24 +383,26 @@ def test_update(table: Table, mock_response_single, replace, http_method): assert 
dict_equals(resp, mock_response_single) +@pytest.mark.parametrize("container", [list, tuple, iter]) @pytest.mark.parametrize("replace,http_method", [(False, "PATCH"), (True, "PUT")]) -def test_batch_update(table: Table, replace, http_method): +def test_batch_update(table: Table, container, replace, http_method): records = [fake_record(fieldvalue=index) for index in range(50)] with Mocker() as mock: mock.register_uri( http_method, - table.url, + table.urls.records, response_list=[ {"json": {"records": chunk}} for chunk in table.api.chunked(records) ], ) - resp = table.batch_update(records, replace=replace) + resp = table.batch_update(container(records), replace=replace) assert resp == records +@pytest.mark.parametrize("container", [list, tuple, iter]) @pytest.mark.parametrize("replace,http_method", [(False, "PATCH"), (True, "PUT")]) -def test_batch_upsert(table: Table, replace, http_method, monkeypatch): +def test_batch_upsert(table: Table, container, replace, http_method, monkeypatch): field_name = "Name" exists1 = fake_record({field_name: "Exists 1"}) exists2 = fake_record({field_name: "Exists 2"}) @@ -279,11 +420,13 @@ def test_batch_upsert(table: Table, replace, http_method, monkeypatch): with Mocker() as mock: mock.register_uri( http_method, - table.url, + table.urls.records, response_list=[{"json": response} for response in responses], ) monkeypatch.setattr(table.api, "MAX_RECORDS_PER_REQUEST", 1) - resp = table.batch_upsert(payload, key_fields=[field_name], replace=replace) + resp = table.batch_upsert( + container(payload), key_fields=[field_name], replace=replace + ) assert resp == { "createdRecords": [created["id"]], @@ -306,18 +449,21 @@ def test_delete(table: Table, mock_response_single): id_ = mock_response_single["id"] expected = {"deleted": True, "id": id_} with Mocker() as mock: - mock.delete(urljoin(table.url, id_), status_code=201, json=expected) + mock.delete(table.urls.record(id_), status_code=201, json=expected) resp = table.delete(id_) assert 
resp == expected -def test_batch_delete(table: Table, mock_records): +@pytest.mark.parametrize("container", [list, tuple, iter]) +def test_batch_delete(table: Table, container, mock_records): ids = [i["id"] for i in mock_records] with Mocker() as mock: for chunk in _chunk(ids, 10): json_response = {"records": [{"deleted": True, "id": id_} for id_ in chunk]} url_match = ( - Request("get", table.url, params={"records[]": chunk}).prepare().url + Request("get", table.urls.records, params={"records[]": chunk}) + .prepare() + .url ) mock.delete( url_match, @@ -325,11 +471,214 @@ def test_batch_delete(table: Table, mock_records): json=json_response, ) - resp = table.batch_delete(ids) + resp = table.batch_delete(container(ids)) expected = [{"deleted": True, "id": i} for i in ids] assert resp == expected +def test_create_field(table, mock_table_schema, requests_mock, sample_json): + """ + Tests the API for creating a field (but without actually performing the operation). + """ + mock_create = requests_mock.post( + table.urls.fields, + json=sample_json("field_schema/SingleSelectFieldSchema"), + ) + + # Ensure we have pre-loaded our schema + table.schema() + assert mock_table_schema.call_count == 1 + + # Create the field + choices = ["Todo", "In progress", "Done"] + fld = table.create_field( + "Status", + "singleSelect", + description="field description", + options={"choices": choices}, + ) + assert mock_create.call_count == 1 + assert mock_create.request_history[-1].json() == { + "name": "Status", + "type": "singleSelect", + "description": "field description", + "options": {"choices": choices}, + } + + # Test the result we got back + assert fld.id == "fldqCjrs1UhXgHUIc" + assert fld.name == "Status" + assert {c.name for c in fld.options.choices} == set(choices) + + # Test that we constructed the URL correctly + assert fld._url.endswith(f"/{table.base.id}/tables/{table.name}/fields/{fld.id}") + + # Test that the schema has been updated without a second API call + assert 
table._schema.field(fld.id).name == "Status" + assert mock_table_schema.call_count == 1 + + +def test_delete_view(table, mock_table_schema, requests_mock): + view = table.schema().view("Grid view") + m = requests_mock.delete(view._url) + view.delete() + assert m.call_count == 1 + + +fake_upsert = {"updatedRecords": [], "createdRecords": [], "records": []} + + +def test_use_field_ids__get_record(table, monkeypatch, requests_mock): + """ + Test that setting api.use_field_ids=True will change the default behavior + (but not the explicit behavior) of Table.get() + """ + record = fake_record() + url = table.urls.record(record_id := record["id"]) + m = requests_mock.register_uri("GET", url, json=record) + + # by default, we don't pass the param at all + table.get(record_id) + assert m.called + assert "returnFieldsByFieldId" not in m.last_request.qs + + # if use_field_ids=True, we should pass the param... + monkeypatch.setattr(table.api, "use_field_ids", True) + m.reset() + table.get(record_id) + assert m.called + assert m.last_request.qs["returnFieldsByFieldId"] == ["1"] + + # ...but we can override it + m.reset() + table.get(record_id, use_field_ids=False) + assert m.called + assert m.last_request.qs["returnFieldsByFieldId"] == ["0"] + + +@pytest.mark.parametrize("method_name", ("all", "first")) +def test_use_field_ids__get_records(table, monkeypatch, requests_mock, method_name): + """ + Test that setting api.use_field_ids=True will change the default behavior + (but not the explicit behavior) of Table.all() and Table.first() + """ + m = requests_mock.register_uri("GET", table.urls.records, json={"records": []}) + + # by default, we don't pass the param at all + method = getattr(table, method_name) + method() + assert m.called + assert "returnFieldsByFieldId" not in m.last_request.qs + + # if use_field_ids=True, we should pass the param... 
+ monkeypatch.setattr(table.api, "use_field_ids", True) + m.reset() + method() + assert m.called + assert m.last_request.qs["returnFieldsByFieldId"] == ["1"] + + # ...but we can override it + m.reset() + method(use_field_ids=False) + assert m.called + assert m.last_request.qs["returnFieldsByFieldId"] == ["0"] + + +@pytest.mark.parametrize( + "method_name,method_args,http_method,suffix,response", + [ + ("create", ({"fields": {}}), "POST", "", fake_record()), + ("update", ("rec123", {}), "PATCH", "rec123", fake_record()), + ("batch_create", ([fake_record()],), "POST", "", {"records": []}), + ("batch_update", ([fake_record()],), "PATCH", "", {"records": []}), + ("batch_upsert", ([fake_record()], ["Key"]), "PATCH", "", fake_upsert), + ], +) +def test_use_field_ids__post( + table, + monkeypatch, + requests_mock, + method_name, + method_args, + http_method, + suffix, + response, +): + """ + Test that setting api.use_field_ids=True will change the default behavior + (but not the explicit behavior) of the create/update API methods on Table. + """ + url = table.urls.records / suffix + print(f"{url=}") + m = requests_mock.register_uri(http_method, url.rstrip("/"), json=response) + + # by default, the param is False + method = getattr(table, method_name) + method(*method_args) + assert m.call_count == 1 + assert m.last_request.json()["returnFieldsByFieldId"] is False + + # if use_field_ids=True, we should pass the param... 
+ monkeypatch.setattr(table.api, "use_field_ids", True) + m.reset() + method(*method_args) + assert m.call_count == 1 + assert m.last_request.json()["returnFieldsByFieldId"] is True + + # ...but we can override it + m.reset() + method(*method_args, use_field_ids=False) + assert m.call_count == 1 + assert m.last_request.json()["returnFieldsByFieldId"] is False + + +RECORD_ID = fake_id() +FIELD_ID = fake_id("fld") + + +@pytest.fixture +def mock_upload_attachment(requests_mock, table): + return requests_mock.post( + f"https://content.airtable.com/v0/{table.base.id}/{RECORD_ID}/{FIELD_ID}/uploadAttachment", + status_code=200, + json={ + "id": RECORD_ID, + "createdTime": NOW, + "fields": {FIELD_ID: [fake_attachment()]}, + }, + ) + + +@pytest.mark.parametrize("content", [b"Hello, World!", "Hello, World!"]) +def test_upload_attachment(mock_upload_attachment, table, content): + """ + Test that we can upload an attachment to a record. + """ + table.upload_attachment(RECORD_ID, FIELD_ID, "sample.txt", content) + assert mock_upload_attachment.last_request.json() == { + "contentType": "text/plain", + "file": "SGVsbG8sIFdvcmxkIQ==\n", # base64 encoded "Hello, World!" + "filename": "sample.txt", + } + + +def test_upload_attachment__no_content_type(mock_upload_attachment, table, tmp_path): + """ + Test that we can upload an attachment to a record. + """ + tmp_file = tmp_path / "sample_no_extension" + tmp_file.write_bytes(b"Hello, World!") + + with pytest.warns(Warning, match="Could not guess content-type"): + table.upload_attachment(RECORD_ID, FIELD_ID, tmp_file) + + assert mock_upload_attachment.last_request.json() == { + "contentType": "application/octet-stream", + "file": "SGVsbG8sIFdvcmxkIQ==\n", # base64 encoded "Hello, World!" 
+ "filename": "sample_no_extension", + } + + # Helpers diff --git a/tests/test_api_types.py b/tests/test_api_types.py index e124510d..2ed38ff2 100644 --- a/tests/test_api_types.py +++ b/tests/test_api_types.py @@ -1,6 +1,6 @@ +import pydantic import pytest -from pyairtable._compat import pydantic from pyairtable.api import types as T from pyairtable.testing import fake_attachment, fake_id, fake_record, fake_user @@ -14,7 +14,9 @@ (T.ButtonDict, {"label": "My Button", "url": "http://example.com"}), (T.ButtonDict, {"label": "My Button", "url": None}), (T.CollaboratorDict, fake_user()), - (T.CreateAttachmentDict, {"url": "http://example.com", "filename": "test.jpg"}), + (T.CreateAttachmentById, {"id": "att123"}), + (T.CreateAttachmentByUrl, {"url": "http://example.com"}), + (T.CreateAttachmentDict, {"id": "att123"}), (T.CreateAttachmentDict, {"url": "http://example.com"}), (T.CreateRecordDict, {"fields": {}}), (T.RecordDeletedDict, {"deleted": True, "id": fake_id()}), @@ -35,6 +37,15 @@ def test_assert_typed_dict(cls, value): T.assert_typed_dicts(cls, [value, -1]) +def test_assert_typed_dict__fail_union(): + """ + Test that we get the correct error message when assert_typed_dict + fails when called with a union of TypedDicts. 
+ """ + with pytest.raises(pydantic.ValidationError): + T.assert_typed_dict(T.CreateAttachmentDict, {"not": "good"}) + + @pytest.mark.parametrize( "cls,value", [ @@ -42,7 +53,8 @@ def test_assert_typed_dict(cls, value): (T.BarcodeDict, {"type": "upc"}), (T.ButtonDict, {}), (T.CollaboratorDict, {}), - (T.CreateAttachmentDict, {}), + (T.CreateAttachmentById, {}), + (T.CreateAttachmentByUrl, {}), (T.CreateRecordDict, {}), (T.RecordDeletedDict, {}), (T.RecordDict, {}), diff --git a/tests/test_api_workspace.py b/tests/test_api_workspace.py new file mode 100644 index 00000000..1bf51064 --- /dev/null +++ b/tests/test_api_workspace.py @@ -0,0 +1,87 @@ +import pytest + +from pyairtable.api.base import Base +from pyairtable.api.workspace import Workspace + + +@pytest.fixture +def workspace_id(): + return "wspFakeWorkspaceId" + + +@pytest.fixture +def workspace(api, workspace_id): + return Workspace(api, workspace_id) + + +@pytest.fixture +def mock_info(workspace, requests_mock, sample_json): + return requests_mock.get( + workspace.urls.meta, json=sample_json("WorkspaceCollaborators") + ) + + +def test_collaborators(workspace, mock_info): + assert workspace.collaborators().id == "wspmhESAta6clCCwF" + assert workspace.collaborators().name == "my first workspace" + assert mock_info.call_count == 1 + + +def test_name(workspace, mock_info): + assert workspace.name == "my first workspace" + assert mock_info.call_count == 1 + + +def test_bases(workspace, mock_info): + bases = workspace.bases() + assert len(bases) == 2 + assert bases[0].id == "appLkNDICXNqxSDhG" + assert bases[1].id == "appSW9R5uCNmRmfl6" + assert mock_info.call_count == 1 + + +def test_create_base(workspace, requests_mock, sample_json): + url = workspace.api.urls.bases + requests_mock.get(url, json=sample_json("Bases")) + requests_mock.post(url, json={"id": "appLkNDICXNqxSDhG"}) + base = workspace.create_base("Base Name", []) + assert isinstance(base, Base) + assert base.id == "appLkNDICXNqxSDhG" + + +def 
test_delete(workspace, requests_mock): + m = requests_mock.delete( + workspace.urls.meta, json={"id": workspace.id, "deleted": True} + ) + workspace.delete() + assert m.call_count == 1 + + +@pytest.mark.parametrize("workspace_param", ["workspace", "workspace_id"]) +@pytest.mark.parametrize("base_param", ["base", "base_id"]) +@pytest.mark.parametrize( + "kwargs,expected", + [ + ({}, {}), + ({"index": 8}, {"targetIndex": 8}), + ], +) +def test_move_base( + workspace, + workspace_id, + workspace_param, + base, + base_id, + base_param, + kwargs, + expected, + requests_mock, +): + m = requests_mock.post(workspace.urls.move_base) + workspace.move_base(locals()[base_param], locals()[workspace_param], **kwargs) + assert m.call_count == 1 + assert m.request_history[-1].json() == { + "baseId": base_id, + "targetWorkspaceId": workspace_id, + **expected, + } diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 00000000..8831a043 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,257 @@ +import json +from unittest import mock + +import pytest +from click.testing import CliRunner + +import pyairtable.cli +import pyairtable.orm.generate +from pyairtable.testing import fake_id + + +@pytest.fixture +def user_id(): + return "usrL2PNC5o3H4lBEi" + + +@pytest.fixture(autouse=True) +def mock_metadata( + api, + user_id, + mock_base_metadata, + mock_workspace_metadata, + enterprise, + requests_mock, + sample_json, +): + user_info = sample_json("UserInfo") + user_group = sample_json("UserGroup") + enterprise_info = sample_json("EnterpriseInfo") + requests_mock.get(api.urls.whoami, json={"id": user_id}) + requests_mock.get(enterprise.urls.meta, json=enterprise_info) + requests_mock.get(enterprise.urls.users, json={"users": [user_info]}) + requests_mock.get(enterprise.urls.user(user_id), json=user_info) + for group_id in enterprise_info["groupIds"]: + requests_mock.get(enterprise.urls.group(group_id), json=user_group) + + +@pytest.fixture +def run(mock_metadata, 
monkeypatch): + default_env = {"AIRTABLE_API_KEY": "test"} + + def _runner(*args: str, env: dict = default_env, fails: bool = False): + # make sure we're starting from a blank environment + monkeypatch.delenv("AIRTABLE_API_KEY", raising=False) + monkeypatch.delenv("AIRTABLE_API_KEY_FILE", raising=False) + # run the command + runner = CliRunner(env=env) + result = runner.invoke(pyairtable.cli.cli, args) + # if a test fails, show the command's output + print(f"{result.output=}") + if fails and result.exit_code == 0: + raise RuntimeError("expected failure, but command succeeded") + if result.exit_code != 0 and not fails: + print(f"{result.exception=}") + if hasattr(result.exception, "request"): + print(f"{result.exception.request.url=}") + raise RuntimeError(f"command failed: {args}") + return result + + def _runner_with_json(*args, **kwargs): + result = _runner(*args, **kwargs) + assert result.stdout, "command did not produce any output" + return json.loads(result.stdout) + + _runner.json = _runner_with_json + + return _runner + + +def test_help(run): + """ + Test that the --help message lists the correct top-level commands. 
+ """ + result = run("--help") + lines = result.output.split("Commands:", 1)[1].splitlines() + defined_commands = set(pyairtable.cli.CLI_COMMANDS) + listed_commands = set(line.strip().split(" ")[0] for line in lines) + assert not defined_commands - listed_commands + + +def test_error_without_key(run): + result = run("whoami", env={}, fails=True) + assert "--key, --key-file, or --key-env required" in result.output + + +def test_error_invalid_command(run): + run("asdf", fails=True) + + +def test_invalid_key_args(run, tmp_path): + keyfile = tmp_path / "keyfile.txt" + keyfile.write_text("fakeKey") + for args in [ + ("--key", "key", "--key-file", keyfile), + ("--key", "key", "--key-env", "key"), + ("--key-env", "key", "--key-file", keyfile), + ]: + result = run(*args, "whoami", env={}, fails=True) + print(args) + assert "only one of --key, --key-file, --key-env allowed" in result.output + + +@pytest.mark.parametrize("cmd", ["whoami", "who", "w"]) # test alias +def test_whoami(run, cmd, user_id): + result = run.json(cmd) + assert result == {"id": user_id} + + +@pytest.mark.parametrize("option", ["-k", "--key"]) +def test_whoami__key(run, option, user_id): + result = run.json(option, "key", "whoami", env={}) + assert result == {"id": user_id} + + +@pytest.mark.parametrize("option", ["-ke", "--key-env"]) +def test_whoami__keyenv(run, option, user_id): + env = {"THE_KEY": "fakeKey"} + result = run.json(option, "THE_KEY", "whoami", env=env) + assert result == {"id": user_id} + + +@pytest.mark.parametrize("option", ["-kf", "--key-file"]) +def test_whoami__keyfile(run, option, user_id, tmp_path): + keyfile = tmp_path / "keyfile.txt" + keyfile.write_text("fakeKey") + result = run.json(option, keyfile, "whoami", env={}) + assert result == {"id": user_id} + + +def test_bases(run, base): + result = run.json("bases") + assert len(result) == 2 + assert result[0]["id"] == base.id + + +def test_base(run): + result = run("base", fails=True) + assert "Missing argument 'BASE_ID'" in 
result.output + + +@pytest.mark.parametrize("cmd", ["orm", "o"]) # test alias +def test_base_orm(base, run, cmd): + result = run("base", base.id, cmd) + expected = str(pyairtable.orm.generate.ModelFileBuilder(base)) + assert result.output.rstrip().endswith(expected) + + +@pytest.mark.parametrize("extra_args", [[], ["schema"]]) +def test_base_schema(run, base, extra_args): + result = run.json("base", base.id, *extra_args) + assert list(result) == ["tables"] + assert result["tables"][0]["name"] == "Apartments" + + +@pytest.mark.parametrize("cmd", ["records", "r"]) # test alias +@pytest.mark.parametrize( + "extra_args,expected_kwargs", + [ + ([], {}), + (["-f", "$formula"], {"formula": "$formula"}), + (["--formula", "$formula"], {"formula": "$formula"}), + (["-v", "$view"], {"view": "$view"}), + (["--view", "$view"], {"view": "$view"}), + (["-n", 10], {"max_records": 10}), + (["--limit", 10], {"max_records": 10}), + (["-F", "$fld1", "--field", "$fld2"], {"fields": ["$fld1", "$fld2"]}), + (["-S", "fld1", "--sort", "-fld2"], {"sort": ["fld1", "-fld2"]}), + ], +) +@mock.patch("pyairtable.Table.all") +def test_base_table_records( + mock_table_all, run, cmd, base, extra_args, expected_kwargs +): + defaults = { + "formula": None, + "view": None, + "max_records": None, + "fields": [], + "sort": [], + } + expected = {**defaults, **expected_kwargs} + fake_ids = [fake_id() for _ in range(3)] + mock_table_all.return_value = [{"id": id} for id in fake_ids] + result = run.json("base", base.id, "table", "Apartments", cmd, *extra_args) + mock_table_all.assert_called_once_with(**expected) + assert len(result) == 3 + assert set(record["id"] for record in result) == set(fake_ids) + + +@pytest.mark.parametrize("extra_args", [[], ["schema"]]) +def test_base_table_schema(run, base, extra_args): + result = run.json("base", base.id, "table", "Apartments", *extra_args) + assert result["fields"][0]["id"] == "fld1VnoyuotSTyxW1" + + +@pytest.mark.parametrize("cmd", ["c", "collaborators"]) +def 
test_base_collaborators(run, base, cmd): + result = run.json("base", base.id, cmd) + assert result["id"] == base.id + assert result["collaborators"]["baseCollaborators"][0]["email"] == "foo@bam.com" + + +@pytest.mark.parametrize("cmd", ["sh", "shares"]) +def test_base_shares(run, base, cmd): + result = run.json("base", base.id, cmd) + assert result[0]["shareId"] == "shr9SpczJvQpfAzSp" + + +@pytest.mark.parametrize("cmd", ["e", "enterprise"]) +@pytest.mark.parametrize("extra_args", [[], ["info"]]) +def test_enterprise_info(run, enterprise, cmd, extra_args): + result = run.json(cmd, enterprise.id, *extra_args) + assert result["id"] == enterprise.id + + +def test_enterprise_user(run, enterprise, user_id): + result = run.json("enterprise", enterprise.id, "user", user_id) + assert result["id"] == user_id + assert result["email"] == "foo@bar.com" + + +def test_enterprise_users(run, enterprise, user_id): + result = run.json("enterprise", enterprise.id, "users", user_id) + assert list(result) == [user_id] + assert result[user_id]["id"] == user_id + assert result[user_id]["email"] == "foo@bar.com" + + +def test_enterprise_users__all(run, enterprise, user_id): + result = run.json("enterprise", enterprise.id, "users", "--all") + assert list(result) == [user_id] + assert result[user_id]["id"] == user_id + assert result[user_id]["email"] == "foo@bar.com" + + +def test_enterprise_users__invalid(run, enterprise, user_id): + run("enterprise", enterprise.id, "users", fails=True) + run("enterprise", enterprise.id, "users", "--all", user_id, fails=True) + + +def test_enterprise_group(run, enterprise): + result = run.json("enterprise", enterprise.id, "group", "ugp1mKGb3KXUyQfOZ") + assert result["id"] == "ugp1mKGb3KXUyQfOZ" + assert result["name"] == "Group name" + + +@pytest.mark.parametrize("option", ["ugp1mKGb3KXUyQfOZ", "-a", "--all"]) +def test_enterprise_groups(run, enterprise, option): + result = run.json("enterprise", enterprise.id, "groups", option) + assert list(result) == 
["ugp1mKGb3KXUyQfOZ"] + assert result["ugp1mKGb3KXUyQfOZ"]["id"] == "ugp1mKGb3KXUyQfOZ" + assert result["ugp1mKGb3KXUyQfOZ"]["name"] == "Group name" + + +def test_enterprise_groups__invalid(run, enterprise): + run("enterprise", enterprise.id, "groups", fails=True) + run("enterprise", enterprise.id, "groups", "--all", "ugp1mKGb3KXUyQfOZ", fails=True) diff --git a/tests/test_formulas.py b/tests/test_formulas.py index 4e36a89d..6514cb73 100644 --- a/tests/test_formulas.py +++ b/tests/test_formulas.py @@ -1,88 +1,481 @@ +from datetime import date, datetime, timezone +from decimal import Decimal +from fractions import Fraction + import pytest +from mock import call + +import pyairtable.exceptions +from pyairtable import formulas as F +from pyairtable import orm +from pyairtable.formulas import AND, EQ, GT, GTE, LT, LTE, NE, NOT, OR +from pyairtable.testing import fake_meta + + +def test_equivalence(): + assert F.Formula("a") == F.Formula("a") + assert F.Formula("a") != F.Formula("b") + assert F.Formula("a") != "b" + -from pyairtable.formulas import ( - AND, - EQUAL, - FIELD, - FIND, - IF, - LOWER, - OR, - STR_VALUE, - escape_quotes, - match, +def test_operators(): + lft = F.Formula("a") + rgt = F.Formula("b") + assert str(lft) == "a" + assert str(lft & rgt) == "AND(a, b)" + assert str(lft | rgt) == "OR(a, b)" + assert str(~(lft & rgt)) == "NOT(AND(a, b))" + assert repr(lft & rgt) == "AND(Formula('a'), Formula('b'))" + assert repr(lft | rgt) == "OR(Formula('a'), Formula('b'))" + assert repr(~F.Formula("a")) == "NOT(Formula('a'))" + assert lft.flatten() is lft + assert repr(lft ^ rgt) == "XOR(Formula('a'), Formula('b'))" + assert str(lft ^ rgt) == "XOR(a, b)" + + +@pytest.mark.parametrize( + "cmp,op", + [ + (EQ, "="), + (NE, "!="), + (GT, ">"), + (GTE, ">="), + (LT, "<"), + (LTE, "<="), + ], +) +def test_comparisons(cmp, op): + assert repr(cmp(1, 1)) == f"{cmp.__name__}(1, 1)" + assert str(cmp(1, 1)) == f"1{op}1" + assert str(cmp(F.Formula("Foo"), "Foo")) == 
f"Foo{op}'Foo'" + + +@pytest.mark.parametrize( + "target", + [ + F.Formula("X"), # Formula + F.Field("X"), # Field + F.EQ(1, 1), # Comparison + F.TODAY(), # FunctionCall + ], +) +@pytest.mark.parametrize( + "shortcut,cmp", + [ + ("eq", EQ), + ("ne", NE), + ("gt", GT), + ("gte", GTE), + ("lt", LT), + ("lte", LTE), + ], ) +def test_comparison_shortcuts(target, shortcut, cmp): + """ + Test that methods like .eq() are exposed on all subclasses of Formula. + """ + formula = getattr(target, shortcut)("Y") # Field("X").eq("Y") + assert formula == cmp(target, "Y") # EQ(Field("X"), "Y") -def test_equal(): - assert EQUAL("A", "B") == "A=B" +def test_comparison_equivalence(): + assert EQ(1, 1) == EQ(1, 1) + assert EQ(1, 2) != EQ(2, 1) + assert EQ(1, 1) != NE(1, 1) + assert EQ(1, 1) != F.Formula("1=1") -def test_field(): - assert FIELD("Name") == "{Name}" - assert FIELD("Guest's Name") == r"{Guest\'s Name}" +def test_comparison_is_abstract(): + with pytest.raises(NotImplementedError): + F.Comparison("lft", "rgt") -def test_and(): - assert AND("A", "B", "C") == "AND(A,B,C)" +@pytest.mark.parametrize("op", ("AND", "OR")) +def test_compound(op): + cmp = F.Compound(op, [EQ("foo", 1), EQ("bar", 2)]) + assert repr(cmp) == f"{op}(EQ('foo', 1), EQ('bar', 2))" -def test_or(): - assert OR("A", "B", "C") == "OR(A,B,C)" +@pytest.mark.parametrize("op", ("AND", "OR")) +def test_compound_with_iterable(op): + cmp = F.Compound(op, (EQ(f"f{n}", n) for n in range(3))) + assert repr(cmp) == f"{op}(EQ('f0', 0), EQ('f1', 1), EQ('f2', 2))" -def test_if(): - assert IF(1, 0, 1) == "IF(1, 0, 1)" +def test_compound_equivalence(): + assert F.Compound("AND", [1]) == F.Compound("AND", [1]) + assert F.Compound("AND", [1]) != F.Compound("AND", [2]) + assert F.Compound("AND", [1]) != F.Compound("OR", [1]) + assert F.Compound("AND", [1]) != [1] -def test_find(): - rv = FIND(STR_VALUE(2021), FIELD("DatetimeCol")) - assert rv == "FIND('2021', {DatetimeCol})" - rv = FIND(STR_VALUE(2021), FIELD("DatetimeCol"), 2) 
- assert rv == "FIND('2021', {DatetimeCol}, 2)" +@pytest.mark.parametrize("cmp", [AND, OR]) +@pytest.mark.parametrize( + "call_args", + [ + # mix *components and and **fields + call(EQ("foo", 1), bar=2), + # multiple *components + call(EQ("foo", 1), EQ(F.Field("bar"), 2)), + # one item in *components that is also an iterable + call([EQ("foo", 1), EQ(F.Field("bar"), 2)]), + call((EQ("foo", 1), EQ(F.Field("bar"), 2))), + lambda: call(iter([EQ("foo", 1), EQ(F.Field("bar"), 2)])), + # test that we accept `str` and convert to formulas + call(["'foo'=1", "{bar}=2"]), + ], +) +def test_compound_constructors(cmp, call_args): + if type(call_args) is not type(call): + call_args = call_args() + compound = cmp(*call_args.args, **call_args.kwargs) + expected = cmp(EQ("foo", 1), EQ(F.Field("bar"), 2)) + # compare final output expression, since the actual values will not be equal + assert str(compound) == str(expected) -def test_string_value(): - assert STR_VALUE("A") == "'A'" +@pytest.mark.parametrize("cmp", ["AND", "OR", "NOT"]) +def test_compound_without_parameters(cmp): + with pytest.raises( + ValueError, + match=r"Compound\(\) requires at least one component", + ): + F.Compound(cmp, []) -def test_combination(): - formula = AND( - EQUAL(FIELD("First Name"), STR_VALUE("A")), - EQUAL(FIELD("Last Name"), STR_VALUE("B")), - EQUAL(FIELD("Age"), STR_VALUE(15)), +def test_compound_flatten(): + a = EQ("a", "a") + b = EQ("b", "b") + c = EQ("c", "c") + d = EQ("d", "d") + e = EQ("e", "e") + c = (a & b) & (c & (d | e)) + assert repr(c) == repr( + AND( + AND(EQ("a", "a"), EQ("b", "b")), + AND(EQ("c", "c"), OR(EQ("d", "d"), EQ("e", "e"))), + ) ) - assert formula == ("AND({First Name}='A',{Last Name}='B',{Age}='15')") + assert repr(c.flatten()) == repr( + AND( + EQ("a", "a"), + EQ("b", "b"), + EQ("c", "c"), + OR(EQ("d", "d"), EQ("e", "e")), + ) + ) + assert repr((~c).flatten()) == repr( + NOT( + AND( + EQ("a", "a"), + EQ("b", "b"), + EQ("c", "c"), + OR(EQ("d", "d"), EQ("e", "e")), + ) + ) + 
) + assert str((~c).flatten()) == ( + "NOT(AND('a'='a', 'b'='b', 'c'='c', OR('d'='d', 'e'='e')))" + ) + + +def test_compound_flatten_circular_dependency(): + circular = NOT(F.Formula("x")) + circular.components = [circular] + with pytest.raises(pyairtable.exceptions.CircularFormulaError): + circular.flatten() @pytest.mark.parametrize( - "dict,kwargs,expected_formula", + "compound,expected", [ - ({"First Name": "John"}, {"match_any": False}, "{First Name}='John'"), - ({"First Name": "John"}, {"match_any": True}, "{First Name}='John'"), - ({"A": "1", "B": "2"}, {"match_any": False}, "AND({A}='1',{B}='2')"), - ({"A": "1", "B": "2"}, {"match_any": True}, "OR({A}='1',{B}='2')"), - ({}, {"match_any": False}, ""), - ({}, {"match_any": True}, ""), + (EQ(1, 1).eq(True), "(1=1)=TRUE()"), + (EQ(False, EQ(1, 2)), "FALSE()=(1=2)"), ], ) -def test_match(dict, kwargs, expected_formula): - rv = match(dict, **kwargs) - assert rv == expected_formula +def test_compound_with_compound(compound, expected): + assert str(compound) == expected + + +def test_not(): + assert str(NOT(EQ("foo", 1))) == "NOT('foo'=1)" + assert str(NOT(foo=1)) == "NOT({foo}=1)" + + with pytest.raises(TypeError): + NOT(EQ("foo", 1), EQ("bar", 2)) + + with pytest.raises(ValueError, match="requires exactly one condition; got 2"): + NOT(EQ("foo", 1), bar=2) + + with pytest.raises(ValueError, match="requires exactly one condition; got 2"): + NOT(foo=1, bar=2) + + with pytest.raises(ValueError, match="requires exactly one condition; got 0"): + NOT() @pytest.mark.parametrize( - "text,escaped", + "input,expected", [ - ("hello", "hello"), - ("player's name", r"player\'s name"), - (r"player\'s name", r"player\'s name"), + (EQ(F.Formula("a"), "b"), EQ(F.Formula("a"), "b")), + (True, F.TRUE()), + (False, F.FALSE()), + (3, F.Formula("3")), + (3.5, F.Formula("3.5")), + (Decimal("3.14159265"), F.Formula("3.14159265")), + (Fraction("4/19"), F.Formula("4/19")), + ("asdf", F.Formula("'asdf'")), + ("Jane's", 
F.Formula("'Jane\\'s'")), + ([1, 2, 3], TypeError), + ((1, 2, 3), TypeError), + ({1, 2, 3}, TypeError), + ({1: 2, 3: 4}, TypeError), + ( + date(2023, 12, 1), + F.DATETIME_PARSE("2023-12-01"), + ), + ( + datetime(2023, 12, 1, 12, 34, 56), + F.DATETIME_PARSE("2023-12-01T12:34:56.000"), + ), + ( + datetime(2023, 12, 1, 12, 34, 56, tzinfo=timezone.utc), + F.DATETIME_PARSE("2023-12-01T12:34:56.000Z"), + ), + (orm.fields.Field("Foo"), F.Field("Foo")), ], ) -def test_escape_quotes(text, escaped): - rv = escape_quotes(text) - assert rv == escaped +def test_to_formula(input, expected): + """ + Test that certain values are not changed at all by to_formula() + """ + if isinstance(expected, type) and issubclass(expected, Exception): + with pytest.raises(expected): + F.to_formula(input) + else: + assert F.to_formula(input) == expected -def test_lower(): - assert LOWER("TestValue") == "LOWER(TestValue)" +@pytest.mark.parametrize( + "input,expected", + [ + (EQ(F.Formula("a"), "b"), "a='b'"), + (True, "TRUE()"), + (False, "FALSE()"), + (3, "3"), + (3.5, "3.5"), + (Decimal("3.14159265"), "3.14159265"), + (Fraction("4/19"), "4/19"), + ("asdf", "'asdf'"), + ("Jane's", "'Jane\\'s'"), + ([1, 2, 3], TypeError), + ((1, 2, 3), TypeError), + ({1, 2, 3}, TypeError), + ({1: 2, 3: 4}, TypeError), + ( + date(2023, 12, 1), + "DATETIME_PARSE('2023-12-01')", + ), + ( + datetime(2023, 12, 1, 12, 34, 56), + "DATETIME_PARSE('2023-12-01T12:34:56.000')", + ), + ( + datetime(2023, 12, 1, 12, 34, 56, tzinfo=timezone.utc), + "DATETIME_PARSE('2023-12-01T12:34:56.000Z')", + ), + (orm.fields.Field("Foo"), "{Foo}"), + ], +) +def test_to_formula_str(input, expected): + if isinstance(expected, type) and issubclass(expected, Exception): + with pytest.raises(expected): + F.to_formula_str(input) + else: + assert F.to_formula_str(input) == expected + + +@pytest.mark.parametrize( + "sig,expected", + [ + (call({"Field": "value"}), "{Field}='value'"), + (call({"A": ("=", 123), "B": ("!=", 123)}), "AND({A}=123, 
{B}!=123)"), + (call({"A": 123, "B": 123}, match_any=True), "OR({A}=123, {B}=123)"), + (call({"Field": ("<", 123)}), "{Field}<123"), + (call({"Field": ("<=", 123)}), "{Field}<=123"), + (call({"Field": (">", 123)}), "{Field}>123"), + (call({"Field": (">=", 123)}), "{Field}>=123"), + ], +) +def test_match(sig, expected): + assert str(F.match(*sig.args, **sig.kwargs)) == expected + + +def test_match__exception(): + with pytest.raises(ValueError): + F.match({}) + + +def test_function_call(): + fc = F.FunctionCall("IF", 1, True, False) + assert repr(fc) == "IF(1, True, False)" + assert str(fc) == "IF(1, TRUE(), FALSE())" + + +def test_function_call_equivalence(): + assert F.TODAY() == F.TODAY() + assert F.TODAY() != F.NOW() + assert F.CEILING(1) == F.CEILING(1) + assert F.CEILING(1) != F.CEILING(2) + assert F.TODAY() != F.Formula("TODAY()") + + +@pytest.mark.parametrize( + "input,expected", + [ + ("First Name", "{First Name}"), + ("Guest's Name", r"{Guest's Name}"), + ("With {Curly Braces}", r"{With {Curly Braces\}}"), + ], +) +def test_field_name(input, expected): + assert F.field_name(input) == expected + + +def test_quoted(): + assert F.quoted("Guest") == "'Guest'" + assert F.quoted("Guest's Name") == r"'Guest\'s Name'" + assert F.quoted(F.quoted("Guest's Name")) == r"'\'Guest\\\'s Name\''" + + +class FakeModel(orm.Model): + Meta = fake_meta() + name = orm.fields.TextField("Name") + email = orm.fields.EmailField("Email") + phone = orm.fields.PhoneNumberField("Phone") + + +@pytest.mark.parametrize( + "methodname,op", + [ + ("eq", "="), + ("ne", "!="), + ("gt", ">"), + ("gte", ">="), + ("lt", "<"), + ("lte", "<="), + ], +) +def test_orm_field_comparison_shortcuts(methodname, op): + """ + Test each shortcut method on an ORM field. + """ + formula = getattr(FakeModel.name, methodname)("Value") + assert F.to_formula_str(formula) == f"{{Name}}{op}'Value'" + + +def test_orm_field_as_formula(): + """ + Test different ways of using an ORM field in a formula. 
+ """ + formula = FakeModel.email.ne(F.BLANK()) | NE(FakeModel.phone, F.BLANK()) + formula &= FakeModel.name + result = F.to_formula_str(formula.flatten()) + assert result == "AND(OR({Email}!=BLANK(), {Phone}!=BLANK()), {Name})" + + +@pytest.mark.parametrize( + "fn,argcount", + [ + ("ABS", 1), + ("AVERAGE", 2), + ("BLANK", 0), + ("CEILING", 2), + ("CONCATENATE", 2), + ("COUNT", 2), + ("COUNTA", 2), + ("COUNTALL", 2), + ("CREATED_TIME", 0), + ("DATEADD", 3), + ("DATESTR", 1), + ("DATETIME_DIFF", 3), + ("DATETIME_FORMAT", 2), + ("DATETIME_PARSE", 3), + ("DAY", 1), + ("ENCODE_URL_COMPONENT", 1), + ("ERROR", 0), + ("EVEN", 1), + ("EXP", 1), + ("FALSE", 0), + ("FIND", 3), + ("FLOOR", 2), + ("FROMNOW", 1), + ("HOUR", 1), + ("IF", 3), + ("INT", 1), + ("ISERROR", 1), + ("IS_AFTER", 2), + ("IS_BEFORE", 2), + ("IS_SAME", 3), + ("LAST_MODIFIED_TIME", 1), + ("LEFT", 2), + ("LEN", 1), + ("LOG", 2), + ("LOWER", 1), + ("MAX", 2), + ("MID", 3), + ("MIN", 2), + ("MINUTE", 1), + ("MOD", 2), + ("MONTH", 1), + ("NOW", 0), + ("ODD", 1), + ("POWER", 2), + ("RECORD_ID", 0), + ("REGEX_EXTRACT", 2), + ("REGEX_MATCH", 2), + ("REGEX_REPLACE", 3), + ("REPLACE", 4), + ("REPT", 2), + ("RIGHT", 2), + ("ROUND", 2), + ("ROUNDDOWN", 2), + ("ROUNDUP", 2), + ("SEARCH", 3), + ("SECOND", 1), + ("SET_LOCALE", 2), + ("SET_TIMEZONE", 2), + ("SQRT", 1), + ("SUBSTITUTE", 4), + ("SUM", 2), + ("SWITCH", 4), + ("T", 1), + ("TIMESTR", 1), + ("TODAY", 0), + ("TONOW", 1), + ("TRIM", 1), + ("TRUE", 0), + ("UPPER", 1), + ("VALUE", 1), + ("WEEKDAY", 2), + ("WEEKNUM", 2), + ("WORKDAY", 3), + ("WORKDAY_DIFF", 3), + ("XOR", 2), + ("YEAR", 1), + ], +) +def test_function_calls(fn, argcount): + """ + Test that the function call shortcuts in the formulas module + all behave as expected with the given number of arguments. 
+ """ + args = tuple(f"arg{n}" for n in range(1, argcount + 1)) + args_repr = ", ".join(repr(arg) for arg in args) + args_formula = ", ".join(F.to_formula_str(arg) for arg in args) + result = getattr(F, fn)(*args) + assert isinstance(result, F.FunctionCall) + assert result.name == fn + assert result.args == args + assert repr(result) == f"{fn}({args_repr})" + assert str(result) == f"{fn}({args_formula})" diff --git a/tests/test_models.py b/tests/test_models.py index f8b06f8e..e830e616 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,9 +1,14 @@ +from datetime import datetime, timezone +from typing import List + import pytest from pyairtable.models._base import ( AirtableModel, - SerializableModel, - update_forward_refs, + CanDeleteModel, + CanUpdateModel, + RestfulModel, + rebuild_models, ) @@ -15,37 +20,70 @@ def raw_data(): @pytest.fixture def create_instance(api, raw_data): def _creates_instance(**kwargs): - class Subclass(SerializableModel, **kwargs): + # These kwargs used to be interpreted by __init_subclass__ but now that behavior + # is controlled by mixins. This weirdness is just to avoid redoing our tests. + base_classes = [] + if kwargs.pop("allow_update", True): + base_classes.append(CanUpdateModel) + if kwargs.pop("allow_delete", True): + base_classes.append(CanDeleteModel) + + kwargs.setdefault("url", "https://example.com/{self.foo}/{self.bar}/{self.baz}") + + class Subclass(*base_classes, **kwargs): foo: int bar: int baz: int - return Subclass.from_api(api, "https://www.example.com", raw_data) + return Subclass.from_api(raw_data, api) return _creates_instance -def test_raw(raw_data): +def test_raw(api): """ - Test that AirtableModel.parse_obj saves the raw value, so that developers + Test that AirtableModel.from_api saves the raw value, so that developers can access the exact payload we received from the API. This is mostly - in case Airtable adds new things to webhooks or webhook payloads in the future. 
+ in case Airtable adds new things to webhooks or schemas in the future. """ - obj = AirtableModel.parse_obj(raw_data) + + class Grandchild(AirtableModel): + value: int + + class Child(AirtableModel): + grandchild: Grandchild + + class Parent(AirtableModel): + child: Child + + raw = {"child": {"grandchild": {"value": 1}}, "foo": "FOO", "bar": "BAR"} + obj = Parent.from_api(raw, api) assert not hasattr(obj, "foo") assert not hasattr(obj, "bar") - assert obj._raw == raw_data + assert obj._raw == raw + assert obj.child._raw == raw["child"] + assert obj.child.grandchild._raw == raw["child"]["grandchild"] -def test_from_api(raw_data): +@pytest.mark.parametrize("prefix", ["https://api.airtable.com/v0/prefix", "prefix"]) +def test_from_api(raw_data, prefix, api): """ - Test that SerializableModel.from_api persists its parameters correctly. + Test that CanUpdate.from_api persists its parameters correctly, + and that if `url=` is passed to the subclass, we'll always get a valid URL. """ - url = "https://www.example.com" - obj = SerializableModel.from_api("api", url, raw_data) - assert obj._api == "api" - assert obj._url == url + + class Dummy(CanUpdateModel, url="{prefix}/foo={self.foo}/bar={self.bar}"): + foo: int + bar: int + + obj = Dummy.from_api(raw_data, api, context={"prefix": prefix}) + assert obj._api == api assert obj._raw == raw_data + assert obj._url == "https://api.airtable.com/v0/prefix/foo=1/bar=2" + assert obj.foo == 1 + assert obj.bar == 2 + assert not hasattr(obj, "baz") + assert obj._raw["baz"] == 3 def test_save(requests_mock, create_instance): @@ -65,10 +103,84 @@ def test_save(requests_mock, create_instance): def test_save_not_allowed(create_instance): obj = create_instance(allow_update=False) - with pytest.raises(NotImplementedError): + with pytest.raises(AttributeError): + obj.save() + + +def test_save_without_url(create_instance): + """ + Test that if we do not provide context for computing a URL when an instance + is created, we won't be able to 
save it later. + """ + obj = create_instance(url="") + with pytest.raises(RuntimeError): obj.save() +def test_save__nested_reload(requests_mock, api): + """ + Test that reloading an object with nested models correctly reloads all of them, + while preserving those nested models' access to the API. + """ + + class Parent(CanUpdateModel, url="foo/{self.id}"): + id: int + name: str + children: List["Parent.Child"] # noqa + + class Child(CanUpdateModel, url="foo/{parent.id}/child/{child.id}"): + id: int + name: str + + rebuild_models(Parent) + + parent_data = { + "id": 1, + "name": "One", + "children": [ + (child2_data := {"id": 2, "name": "Two"}), + (child3_data := {"id": 3, "name": "Three"}), + ], + } + requests_mock.get(parent_url := api.build_url("foo/1"), json=parent_data) + requests_mock.get(child2_url := api.build_url("foo/1/child/2"), json=child2_data) + requests_mock.get(child3_url := api.build_url("foo/1/child/3"), json=child3_data) + + parent = Parent.from_api(parent_data, api) + assert parent.name == "One" + assert parent.children[0].name == "Two" + + # Test that we can still reload the parent object + m_parent_patch = requests_mock.patch( + parent_url, + json={ + **parent_data, + "name": (parent_update := "One Updated"), + }, + ) + parent.name = parent_update + parent.save() + assert m_parent_patch.call_count == 1 + assert m_parent_patch.last_request.json()["name"] == parent_update + + # Test that we can still patch a nested object after its parent was reloaded, + # because we saved the URL context from `from_api()` and reused it on `_reload()`. 
+ m_child2_patch = requests_mock.patch(child2_url, json=child2_data) + m_child3_patch = requests_mock.patch( + child3_url, + json={ + **child3_data, + "name": (child3_update := "Three Updated"), + }, + ) + parent.children[1].name = child3_update + parent.children[1].save() + assert m_child3_patch.call_count == 1 + assert m_child3_patch.last_request.json()["name"] == child3_update + assert parent.children[1].name == child3_update + assert m_child2_patch.call_count == 0 # just to be sure + + def test_delete(requests_mock, create_instance): obj = create_instance() m = requests_mock.delete(obj._url) @@ -81,7 +193,17 @@ def test_delete(requests_mock, create_instance): def test_delete_not_allowed(create_instance): obj = create_instance(allow_delete=False) - with pytest.raises(NotImplementedError): + with pytest.raises(AttributeError): + obj.delete() + + +def test_delete_without_url(create_instance): + """ + Test that if we do not provide context for computing a URL when an instance + is created, we won't be able to delete it later. + """ + obj = create_instance(url="") + with pytest.raises(RuntimeError): obj.delete() @@ -126,4 +248,87 @@ class Inner(AirtableModel): Outer.Inner.Outer = Outer # This will cause RecursionError if we're not careful - update_forward_refs(Outer) + rebuild_models(Outer) + + +def test_restfulmodel__set_url(api, base): + """ + Test that the RestfulModel class generates a URL based on API context. + Also test that RestfulModel puts the full URL context into certain types + of exceptions that occur during URL formatting. 
+ """ + + class Dummy(RestfulModel, url="{base.id}/{dummy.one}/{dummy.two}"): + one: int + two: str + + data = {"one": 1, "two": "2"} + + d = Dummy.from_api(data, api, context={"base": base}) + assert d._url == api.build_url(f"{base.id}/1/2") + + with pytest.raises(KeyError) as exc_info: + Dummy.from_api(data, api) + + assert exc_info.match(r"\('base', \{'dummy': .*\}\)") + + with pytest.raises(AttributeError) as exc_info: + Dummy.from_api(data, api, context={"base": None}) + + assert exc_info.match( + r'"\'NoneType\' object has no attribute \'id\'"' + r", \{'base': None, 'dummy': Dummy\(.*\)\}" + ) + + +def test_datetime_conversion(api, requests_mock): + """ + Test that if an AirtableModel field is specified as a datetime, + and the input data is provided as a str, we'll convert to a datetime + and back to a str when saving. + """ + + class Dummy(CanUpdateModel, url="{self.id}", writable=["timestamp"]): + id: str + timestamp: datetime + + data = {"id": "rec000", "timestamp": "2024-01-08T12:34:56Z"} + obj = Dummy.from_api(data, api) + assert obj.timestamp == datetime(2024, 1, 8, 12, 34, 56, tzinfo=timezone.utc) + m = requests_mock.patch(obj._url, json=data) + obj.save() + assert m.call_count == 1 + assert m.request_history[0].json() == {"timestamp": "2024-01-08T12:34:56.000Z"} + + +@pytest.mark.parametrize( + "attrpath", + [ + "pyairtable.models.webhook.Webhook.last_successful_notification_time", + "pyairtable.models.webhook.Webhook.expiration_time", + "pyairtable.models.comment.Comment.created_time", + "pyairtable.models.comment.Comment.last_updated_time", + "pyairtable.models.webhook.WebhookNotification.timestamp", + "pyairtable.models.webhook.WebhookPayload.timestamp", + "pyairtable.models.audit.AuditLogResponse.events[0].timestamp", + "pyairtable.models.schema.BaseCollaborators.group_collaborators.via_base[0].created_time", + "pyairtable.models.schema.BaseCollaborators.individual_collaborators.via_base[0].created_time", + 
"pyairtable.models.schema.BaseCollaborators.interfaces['pbdLkNDICXNqxSDhG'].created_time", + "pyairtable.models.schema.BaseCollaborators.interfaces['pbdLkNDICXNqxSDhG'].first_publish_time", + "pyairtable.models.schema.BaseShares.shares[0].created_time", + "pyairtable.models.schema.WorkspaceCollaborators.invite_links.via_base[0].created_time", + "pyairtable.models.schema.EnterpriseInfo.created_time", + "pyairtable.models.schema.WorkspaceCollaborators.created_time", + "pyairtable.models.schema.WorkspaceCollaborators.invite_links.via_base[0].created_time", + "pyairtable.models.schema.UserGroup.created_time", + "pyairtable.models.schema.UserGroup.updated_time", + "pyairtable.models.schema.UserGroup.members[1].created_time", + "pyairtable.models.schema.UserInfo.created_time", + "pyairtable.models.schema.UserInfo.last_activity_time", + ], +) +def test_datetime_models(attrpath, schema_obj): + """ + Test that specific models' fields are correctly converted to datetimes. + """ + assert isinstance(schema_obj(attrpath), datetime) diff --git a/tests/test_models_collaborator.py b/tests/test_models_collaborator.py index a1630475..4f47ab2e 100644 --- a/tests/test_models_collaborator.py +++ b/tests/test_models_collaborator.py @@ -10,7 +10,7 @@ def test_parse(): - user = Collaborator.parse_obj(fake_user_data) + user = Collaborator.model_validate(fake_user_data) assert user.id == fake_user_data["id"] assert user.email == fake_user_data["email"] assert user.name == fake_user_data["name"] diff --git a/tests/test_models_comment.py b/tests/test_models_comment.py index 45564c6f..0cb2487a 100644 --- a/tests/test_models_comment.py +++ b/tests/test_models_comment.py @@ -10,30 +10,14 @@ @pytest.fixture -def comment_json(): - author = fake_user("author") - mentioned = fake_user("mentioned") - return { - "author": author, - "createdTime": NOW, - "id": fake_id("com"), - "lastUpdatedTime": None, - "text": f"Hello, @[{mentioned['id']}]!", - "mentioned": { - mentioned["id"]: { - "displayName": 
mentioned["name"], - "id": mentioned["id"], - "email": mentioned["email"], - "type": "user", - } - }, - } +def comment_json(sample_json): + return sample_json("Comment") @pytest.fixture def comment(comment_json, table): - url = table.record_url(RECORD_ID, "comments", comment_json["id"]) - return Comment.from_api(table.api, url, comment_json) + record_url = table.urls.record(RECORD_ID) + return Comment.from_api(comment_json, table.api, context={"record_url": record_url}) @pytest.fixture @@ -41,18 +25,23 @@ def comments_url(base, table): return f"https://api.airtable.com/v0/{base.id}/{table.name}/{RECORD_ID}/comments" -def test_parse(comment_json): - Comment.parse_obj(comment_json) +def test_parse(comment): + assert isinstance(comment.created_time, datetime.datetime) + assert isinstance(comment.last_updated_time, datetime.datetime) + assert comment.author.id == "usrLkNDICXNqxSDhG" + assert comment.mentioned["usr00000mentioned"].display_name == "Alice Doe" + assert comment.reactions[0].emoji == "👍" -@pytest.mark.parametrize("attr", ["mentioned", "last_updated_time"]) -def test_missing_attributes(comment_json, attr): +def test_missing_attributes(comment_json): """ Test that we can parse the payload when missing optional values. """ - del comment_json[Comment.__fields__[attr].alias] - comment = Comment.parse_obj(comment_json) - assert getattr(comment, attr) is None + del comment_json["lastUpdatedTime"] + del comment_json["mentioned"] + comment = Comment.model_validate(comment_json) + assert comment.mentioned == {} + assert comment.last_updated_time is None @pytest.mark.parametrize( @@ -80,7 +69,7 @@ def test_save(comment, requests_mock): """ new_text = "This was changed!" 
mentions = {} - modified = dict(comment.dict(by_alias=True), mentioned=mentions, text=new_text) + modified = dict(comment._raw, mentioned=mentions, text=new_text) m = requests_mock.patch(comment._url, json=modified) comment.text = "Whatever" diff --git a/tests/test_models_schema.py b/tests/test_models_schema.py new file mode 100644 index 00000000..e909c337 --- /dev/null +++ b/tests/test_models_schema.py @@ -0,0 +1,594 @@ +from operator import attrgetter +from typing import List, Optional + +import mock +import pytest + +from pyairtable.models import schema +from pyairtable.models._base import AirtableModel +from pyairtable.testing import fake_id + + +@pytest.fixture +def mock_base_metadata(base, sample_json, requests_mock): + base_json = sample_json("BaseCollaborators") + requests_mock.get(base.urls.meta, json=base_json) + requests_mock.get(base.urls.tables, json=sample_json("BaseSchema")) + requests_mock.get(base.urls.shares, json=sample_json("BaseShares")) + for pbd_id, pbd_json in base_json["interfaces"].items(): + requests_mock.get(base.urls.interface(pbd_id), json=pbd_json) + + +@pytest.mark.parametrize( + "clsname", + [ + "Bases", + "BaseCollaborators", + "BaseSchema", + "TableSchema", + "ViewSchema", + ], +) +def test_parse(sample_json, clsname): + cls = attrgetter(clsname)(schema) + cls.model_validate(sample_json(clsname)) + + +@pytest.mark.parametrize("cls", schema.FieldSchema.__args__) +def test_parse_field(sample_json, cls): + cls.model_validate(sample_json("field_schema/" + cls.__name__)) + + +@pytest.mark.parametrize( + "clsname,method,id_or_name", + [ + ("Bases", "base", "appLkNDICXNqxSDhG"), + ("Bases", "base", "Apartment Hunting"), + ("BaseSchema", "table", "tbltp8DGLhqbUmjK1"), + ("BaseSchema", "table", "Apartments"), + ("TableSchema", "field", "fld1VnoyuotSTyxW1"), + ("TableSchema", "field", "Name"), + ("TableSchema", "view", "viwQpsuEDqHFqegkp"), + ("TableSchema", "view", "Grid view"), + ], +) +def test_find_in_collection(clsname, method, 
id_or_name, sample_json): + cls = attrgetter(clsname)(schema) + obj = cls.model_validate(sample_json(clsname)) + assert getattr(obj, method)(id_or_name) + + +@pytest.mark.parametrize( + "obj_path, expected_value", + { + "BaseCollaborators.individual_collaborators.via_base[0].permission_level": "create", + "BaseCollaborators.individual_collaborators.via_base[0].user_id": "usrsOEchC9xuwRgKk", + "BaseSchema.tables[0].fields[1].type": "multipleAttachments", + "BaseSchema.tables[0].fields[2].options.inverse_link_field_id": "fldWnCJlo2z6ttT8Y", + "BaseSchema.tables[0].name": "Apartments", + "BaseSchema.tables[0].views[0].type": "grid", + "BaseShares.shares[0].effective_email_domain_allow_list": ["foobar.com"], + "BaseShares.shares[2].state": "disabled", + "EnterpriseInfo.email_domains[0].email_domain": "foobar.com", + "EnterpriseInfo.email_domains[0].is_sso_required": True, + "UserGroup.collaborations.base_collaborations[0].base_id": "appLkNDICXNqxSDhG", + "UserGroup.members[1].user_id": "usrsOEchC9xuwRgKk", + "UserInfo.collaborations.interface_collaborations[0].interface_id": "pbdyGA3PsOziEHPDE", + "UserInfo.is_sso_required": True, + "UserInfo.is_two_factor_auth_enabled": False, + "UserInfo.name": "foo baz", + "WorkspaceCollaborators.base_ids": ["appLkNDICXNqxSDhG", "appSW9R5uCNmRmfl6"], + "WorkspaceCollaborators.invite_links.via_base[0].id": "invJiqaXmPqqAPP99", + }.items(), +) +def test_deserialized_values(obj_path, expected_value, schema_obj): + """ + Spot check that certain values get loaded correctly from JSON into Python. + This is not intended to be comprehensive, just another chance to catch regressions. 
+ """ + assert schema_obj(obj_path) == expected_value + + +class Outer(AirtableModel): + inners: List["Outer.Inner"] + + class Inner(AirtableModel): + id: str + name: str + deleted: Optional[bool] = None + + def find(self, id_or_name): + return schema._find(self.inners, id_or_name) + + +def test_find(): + """ + Test that _find() retrieves an object based on ID or name, + and skips any models that are marked as deleted. + """ + + collection = Outer.model_validate( + { + "inners": [ + {"id": "0001", "name": "One"}, + {"id": "0002", "name": "Two"}, + {"id": "0003", "name": "Three", "deleted": True}, + ] + } + ) + assert collection.find("0001").id == "0001" + assert collection.find("Two").id == "0002" + with pytest.raises(KeyError): + collection.find("0003") + with pytest.raises(KeyError): + collection.find("0004") + + +@pytest.mark.parametrize( + "kind,id", + [ + ("user", "usrsOEchC9xuwRgKk"), + ("group", "ugpR8ZT9KtIgp8Bh3"), + ], +) +def test_base_collaborators__add( + base, kind, id, requests_mock, sample_json, mock_base_metadata +): + """ + Test that we can call base.collaborators().add_{user,group} + to grant access to the base. + """ + method = getattr(base.collaborators(), f"add_{kind}") + m = requests_mock.post(base.urls.collaborators, body="") + method(id, "read") + assert m.call_count == 1 + assert m.last_request.json() == { + "collaborators": [{kind: {"id": id}, "permissionLevel": "read"}] + } + + +@pytest.mark.parametrize( + "kind,id", + [ + ("user", "usrsOEchC9xuwRgKk"), + ("group", "ugpR8ZT9KtIgp8Bh3"), + ], +) +def test_workspace_collaborators__add(api, kind, id, requests_mock, sample_json): + """ + Test that we can call workspace.collaborators().add_{user,group} + to grant access to the workspace. 
+ """ + workspace_json = sample_json("WorkspaceCollaborators") + workspace = api.workspace(workspace_json["id"]) + requests_mock.get(workspace.urls.meta, json=workspace_json) + method = getattr(workspace.collaborators(), f"add_{kind}") + m = requests_mock.post(workspace.urls.collaborators, body="") + method(id, "read") + assert m.call_count == 1 + assert m.last_request.json() == { + "collaborators": [{kind: {"id": id}, "permissionLevel": "read"}] + } + + +@pytest.mark.parametrize( + "name,id", + [ + ("base", "appLkNDICXNqxSDhG"), + ("workspace", "wspmhESAta6clCCwF"), + ], +) +def test_update_collaborator(api, name, id, requests_mock, sample_json): + """ + Test that we can call collaborators().update() to change the permission level + of a user or group on a base or workspace. + """ + target = getattr(api, name)(id) + grp = fake_id("grp") + obj = sample_json(f"{name.capitalize()}Collaborators") + requests_mock.get(api.build_url(f"meta/{name}s/{id}"), json=obj) + m = requests_mock.patch(api.build_url(f"meta/{name}s/{id}/collaborators/{grp}")) + target.collaborators().update(grp, "read") + assert m.call_count == 1 + assert m.last_request.json() == {"permissionLevel": "read"} + + +@pytest.mark.parametrize( + "name,id", + [ + ("base", "appLkNDICXNqxSDhG"), + ("workspace", "wspmhESAta6clCCwF"), + ], +) +def test_remove_collaborator(api, name, id, requests_mock, sample_json): + """ + Test that we can call collaborators().remove() to revoke permissions + from a user or group to a base or workspace. 
+ """ + target = getattr(api, name)(id) + grp = fake_id("grp") + obj = sample_json(f"{name.capitalize()}Collaborators") + requests_mock.get(api.build_url(f"meta/{name}s/{id}"), json=obj) + m = requests_mock.delete(api.build_url(f"meta/{name}s/{id}/collaborators/{grp}")) + target.collaborators().remove(grp) + assert m.call_count == 1 + assert m.last_request.body is None + + +def test_invite_link__delete( + base, + workspace, + requests_mock, + mock_base_metadata, + mock_workspace_metadata, +): + """ + Test that we can revoke an invite link. + """ + for invite_link in [ + base.collaborators().invite_links.via_base[0], + base.collaborators().invite_links.via_workspace[0], + base.collaborators().interfaces["pbdLkNDICXNqxSDhG"].invite_links[0], + workspace.collaborators().invite_links.via_base[0], + workspace.collaborators().invite_links.via_workspace[0], + ]: + endpoint = requests_mock.delete(invite_link._url) + invite_link.delete() + assert endpoint.call_count == 1 + + +@pytest.fixture +def interface_url(base): + return base.urls.interface("pbdLkNDICXNqxSDhG") + + +@pytest.mark.parametrize("kind", ("user", "group")) +def test_add_interface_collaborator( + base, kind, requests_mock, interface_url, mock_base_metadata +): + m = requests_mock.post(f"{interface_url}/collaborators", body="") + interface_schema = base.collaborators().interfaces["pbdLkNDICXNqxSDhG"] + method = getattr(interface_schema, f"add_{kind}") + method("testObjectId", "read") + assert m.call_count == 1 + assert m.last_request.json() == { + "collaborators": [ + { + kind: {"id": "testObjectId"}, + "permissionLevel": "read", + } + ] + } + + +def test_update_interface_collaborator( + base, interface_url, requests_mock, mock_base_metadata +): + m = requests_mock.patch(f"{interface_url}/collaborators/testObjectId") + interface_schema = base.collaborators().interfaces["pbdLkNDICXNqxSDhG"] + interface_schema.update("testObjectId", "read") + assert m.call_count == 1 + assert m.last_request.json() == 
{"permissionLevel": "read"} + + +def test_remove_interface_collaborator( + base, interface_url, requests_mock, mock_base_metadata +): + m = requests_mock.delete(f"{interface_url}/collaborators/testObjectId") + interface_schema = base.collaborators().interfaces["pbdLkNDICXNqxSDhG"] + interface_schema.remove("testObjectId") + assert m.call_count == 1 + assert m.last_request.body is None + + +@pytest.mark.parametrize( + "target_path", + ( + "BaseCollaborators", + "WorkspaceCollaborators", + "BaseCollaborators.interfaces['pbdLkNDICXNqxSDhG']", + ), +) +@pytest.mark.parametrize("kind", ("user", "group")) +def test_add_collaborator( + target_path, + kind, + schema_obj, + requests_mock, # unused; ensures no network traffic +): + target = schema_obj(target_path) + with mock.patch.object(target.__class__, "add_collaborators") as m: + target.add(kind, "testId", "read") + m.assert_called_once_with([{kind: {"id": "testId"}, "permissionLevel": "read"}]) + + +@pytest.mark.parametrize( + "target_path", + ( + "BaseCollaborators", + "WorkspaceCollaborators", + "BaseCollaborators.interfaces['pbdLkNDICXNqxSDhG']", + ), +) +def test_add_collaborator__invalid_kind( + target_path, + schema_obj, + requests_mock, # unused; ensures no network traffic +): + target = schema_obj(target_path) + with mock.patch.object(target.__class__, "add_collaborators") as m: + with pytest.raises(ValueError): + target.add("asdf", "testId", "read") + assert m.call_count == 0 + + +@pytest.mark.parametrize( + "target_path", + ( + "BaseCollaborators", + "WorkspaceCollaborators", + "BaseCollaborators.interfaces['pbdLkNDICXNqxSDhG']", + ), +) +def test_add_collaborators( + target_path, + schema_obj, + base, + workspace, + requests_mock, +): + target = schema_obj(target_path, context={"base": base, "workspace": workspace}) + requests_mock.get(target._url, json=target._raw) + m = requests_mock.post(target._url + "/collaborators") + target.add_collaborators([1, 2, 3, 4]) + assert m.call_count == 1 + assert 
m.last_request.json() == {"collaborators": [1, 2, 3, 4]} + + +@pytest.mark.parametrize( + "expr,expected_url", + [ + ( + "base.collaborators()", + "meta/bases/appLkNDICXNqxSDhG", + ), + ( + "base.collaborators().interfaces['pbdLkNDICXNqxSDhG']", + "meta/bases/appLkNDICXNqxSDhG/interfaces/pbdLkNDICXNqxSDhG", + ), + ( + "base.collaborators().invite_links.via_base[0]", + "meta/bases/appLkNDICXNqxSDhG/invites/invJiqaXmPqq6Ec87", + ), + ( + "base.collaborators().invite_links.via_workspace[0]", + "meta/workspaces/wspmhESAta6clCCwF/invites/invJiqaXmPqq6Ec99", + ), + ( + "base.collaborators().interfaces['pbdLkNDICXNqxSDhG'].invite_links[0]", + "meta/bases/appLkNDICXNqxSDhG/interfaces/pbdLkNDICXNqxSDhG/invites/invJiqaXmPqq6ABCD", + ), + ( + "workspace.collaborators().invite_links.via_base[0]", + "meta/bases/appLkNDICXNqxSDhG/invites/invJiqaXmPqqAPP99", + ), + ( + "workspace.collaborators().invite_links.via_workspace[0]", + "meta/workspaces/wspmhESAta6clCCwF/invites/invJiqaXmPqqWSP00", + ), + ( + "table.schema()", + "meta/bases/appLkNDICXNqxSDhG/tables/tbltp8DGLhqbUmjK1", + ), + ( + "table.schema().field('fld1VnoyuotSTyxW1')", + "meta/bases/appLkNDICXNqxSDhG/tables/tbltp8DGLhqbUmjK1/fields/fld1VnoyuotSTyxW1", + ), + ( + "table.schema().view('viwQpsuEDqHFqegkp')", + "meta/bases/appLkNDICXNqxSDhG/views/viwQpsuEDqHFqegkp", + ), + ], +) +def test_restful_urls( + expr, + expected_url, + api, + base, + workspace, + mock_base_metadata, # unused; ensures no network traffic + mock_workspace_metadata, # unused; ensures no network traffic +): + """ + Test that the URLs for RestfulModels are generated correctly. 
+ """ + table = base.table("tbltp8DGLhqbUmjK1") + obj = eval(expr, None, {"base": base, "table": table, "workspace": workspace}) + assert obj._url == api.build_url(expected_url) + + +@pytest.fixture +def base_share(base, mock_base_metadata) -> schema.BaseShares.Info: + return base.shares()[0] + + +def test_share__enable(base_share, requests_mock): + m = requests_mock.patch(base_share._url) + base_share.enable() + assert m.call_count == 1 + assert m.last_request.json() == {"state": "enabled"} + + +def test_share__disable(base_share, requests_mock): + m = requests_mock.patch(base_share._url) + base_share.disable() + assert m.call_count == 1 + assert m.last_request.json() == {"state": "disabled"} + + +def test_share__delete(base_share, requests_mock): + m = requests_mock.delete(base_share._url) + base_share.delete() + assert m.call_count == 1 + assert m.last_request.body is None + + +def test_workspace_restrictions(workspace, mock_workspace_metadata, requests_mock): + restrictions = workspace.collaborators().restrictions + restrictions.invite_creation = "unrestricted" + restrictions.share_creation = "onlyOwners" + + m = requests_mock.post(restrictions._url) + restrictions.save() + assert m.call_count == 1 + assert m.last_request.json() == { + "inviteCreationRestriction": "unrestricted", + "shareCreationRestriction": "onlyOwners", + } + + +def test_save_date_dependency_settings(api, base, requests_mock): + table_id = fake_id("tbl") + + from pyairtable import orm + + class TaskModel(orm.Model): + # Used to test that add_date_dependency accepts an ORM field. 
+ class Meta: + api_key = api.api_key + base_id = base.id + table_name = "Tasks" + + duration = orm.fields.IntegerField("Duration") + + obj = { + "id": table_id, + "name": "Tasks", + "description": "", + "primaryFieldId": "fldName", + "views": [], + "fields": [ + { + "id": "fldName", + "name": "Name", + "type": "singleLineText", + "options": {}, + }, + { + "id": "fldDepends", + "name": "Depends", + "type": "multipleRecordLinks", + "options": { + "isReversed": False, + "linkedTableId": table_id, + "prefersSingleRecordLink": False, + "inverseLinkFieldId": None, + "viewIdForRecordSelection": None, + }, + }, + { + "id": "fldStartDate", + "name": "Start Date", + "type": "date", + "options": {}, + }, + { + "id": "fldEndDate", + "name": "End Date", + "type": "date", + "options": {}, + }, + { + "id": "fldDuration", + "name": "Duration", + "type": "number", + "options": {}, + }, + ], + } + table_schema = schema.TableSchema.from_api(obj, api, context={"base": base}) + m = requests_mock.patch(table_schema._url, json=obj) + table_schema.set_date_dependency( + start_date_field="fldStartDate", + end_date_field="End Date", + duration_field=TaskModel.duration, + rescheduling_mode="none", + ) + assert m.call_count == 0 + + table_schema.save() + assert m.call_count == 1 + assert m.last_request.json() == { + "name": "Tasks", + "description": "", + "dateDependencySettings": { + "startDateFieldId": "fldStartDate", + "endDateFieldId": "fldEndDate", + "durationFieldId": "fldDuration", + "reschedulingMode": "none", + "isEnabled": True, + "shouldSkipWeekendsAndHolidays": False, + "holidays": [], + }, + } + + +def test_save_date_dependency_settings__invalid_field(table_schema): + with pytest.raises(KeyError, match=r"^'invalid_field'$"): + table_schema.set_date_dependency( + start_date_field="Name", + end_date_field="Name", + duration_field="Name", + predecessor_field="invalid_field", + rescheduling_mode="none", + ) + + +def test_field_type_enum(): + """ + Test that FieldType enum contains 
all expected field types. + """ + # Test that enum inherits from str + assert isinstance(schema.FieldType.SINGLE_LINE_TEXT, str) + + # Test that enum can be used in string comparisons + assert schema.FieldType.SINGLE_LINE_TEXT == "singleLineText" + + # Test that all field config types have corresponding enum values + expected_types = { + "aiText", + "autoNumber", + "barcode", + "button", + "checkbox", + "count", + "createdBy", + "createdTime", + "currency", + "date", + "dateTime", + "duration", + "email", + "externalSyncSource", + "formula", + "lastModifiedBy", + "lastModifiedTime", + "manualSort", + "multilineText", + "multipleAttachments", + "multipleCollaborators", + "multipleLookupValues", + "multipleRecordLinks", + "multipleSelects", + "number", + "percent", + "phoneNumber", + "rating", + "richText", + "rollup", + "singleCollaborator", + "singleLineText", + "singleSelect", + "url", + } + assert expected_types == {member.value for member in schema.FieldType} diff --git a/tests/test_models_webhook.py b/tests/test_models_webhook.py index 5a83a76b..6e9d2f20 100644 --- a/tests/test_models_webhook.py +++ b/tests/test_models_webhook.py @@ -12,11 +12,7 @@ @pytest.fixture def webhook(sample_json, base, api): webhook_json = sample_json("Webhook") - return Webhook.from_api( - api=api, - url=f"{base.webhooks_url}/{webhook_json['id']}", - obj=webhook_json, - ) + return Webhook.from_api(webhook_json, api, context=base) @pytest.fixture @@ -35,7 +31,7 @@ def payload_json(sample_json): ) def test_parse(sample_json, clsname): cls = attrgetter(clsname)(pyairtable.models.webhook) - cls.parse_obj(sample_json(clsname)) + cls.model_validate(sample_json(clsname)) @pytest.mark.parametrize( @@ -71,7 +67,7 @@ def test_delete(webhook: Webhook, requests_mock): def test_error_payload(payload_json): payload_json.update({"error": True, "code": "INVALID_HOOK"}) - payload = WebhookPayload.parse_obj(payload_json) + payload = WebhookPayload.model_validate(payload_json) assert payload.error is 
True assert payload.error_code == "INVALID_HOOK" @@ -186,14 +182,16 @@ def test_notification_from_request(secret): "timestamp": "2022-02-01T21:25:05.663Z", } header = ( - "hmac-sha256-e26da696a90933647bddc83995c3e1e3bb1c3d8ce1ff61cb7469767d50b2b2d4" + "hmac-sha256=e26da696a90933647bddc83995c3e1e3bb1c3d8ce1ff61cb7469767d50b2b2d4" ) body = json.dumps(notification_json) notification = WebhookNotification.from_request(body, header, secret) assert notification.base.id == "app00000000000000" assert notification.webhook.id == "ach00000000000000" - assert notification.timestamp == "2022-02-01T21:25:05.663Z" + assert notification.timestamp == datetime.datetime( + 2022, 2, 1, 21, 25, 5, 663000, tzinfo=datetime.timezone.utc + ) with pytest.raises(ValueError): WebhookNotification.from_request("[1,2,3]", header, secret) diff --git a/tests/test_orm.py b/tests/test_orm.py index b0128596..9820a107 100644 --- a/tests/test_orm.py +++ b/tests/test_orm.py @@ -1,5 +1,6 @@ import re -from datetime import datetime +from datetime import datetime, timezone +from functools import partial from operator import itemgetter from unittest import mock @@ -10,12 +11,15 @@ from pyairtable.orm import Model from pyairtable.orm import fields as f from pyairtable.testing import fake_meta, fake_record +from pyairtable.utils import datetime_to_iso_str + +NOW = datetime.now().isoformat() + "Z" class Address(Model): Meta = fake_meta(table_name="Address") street = f.TextField("Street") - number = f.TextField("Number") + number = f.IntegerField("Number") class Contact(Model): @@ -23,12 +27,25 @@ class Contact(Model): first_name = f.TextField("First Name") last_name = f.TextField("Last Name") email = f.EmailField("Email") - is_registered = f.CheckboxField("Registered") + is_registered = f.CheckboxField("Registered?") address = f.LinkField("Link", Address, lazy=False) birthday = f.DateField("Birthday") created_at = f.CreatedTimeField("Created At") +@pytest.fixture +def contact_record(): + return fake_record( + 
{ + "First Name": "John", + "Last Name": "Doe", + "Email": "john@example.com", + "Registered?": True, + "Birthday": "1970-01-01", + } + ) + + def test_model_basics(): contact = Contact( first_name="Gui", @@ -44,11 +61,12 @@ def test_model_basics(): # save with mock.patch.object(Table, "create") as m_save: - m_save.return_value = {"id": "id", "createdTime": "time"} - contact.save() + m_save.return_value = {"id": "id", "createdTime": NOW} + assert contact.save().created assert m_save.called assert contact.id == "id" + assert contact.created_time.tzinfo is timezone.utc # delete with mock.patch.object(Table, "delete") as m_delete: @@ -63,7 +81,7 @@ def test_model_basics(): record = contact.to_record() assert record["id"] == contact.id - assert record["createdTime"] == contact.created_time + assert record["createdTime"] == datetime_to_iso_str(contact.created_time) assert record["fields"]["First Name"] == contact.first_name @@ -73,7 +91,7 @@ def test_unsupplied_fields(): """ a = Address() assert a.number is None - assert a.street is None + assert a.street == "" def test_null_fields(): @@ -82,14 +100,14 @@ def test_null_fields(): """ a = Address(number=None, street=None) assert a.number is None - assert a.street is None + assert a.street == "" def test_first(): with mock.patch.object(Table, "first") as m_first: m_first.return_value = { "id": "recwnBLPIeQJoYVt4", - "createdTime": "", + "createdTime": NOW, "fields": { "First Name": "X", "Created At": "2014-09-05T12:34:56.000Z", @@ -108,13 +126,63 @@ def test_first_none(): assert contact is None +def test_all_with_comment_count(): + with mock.patch.object(Table, "all") as m_all: + m_all.return_value = [ + { + "id": "rec1", + "createdTime": NOW, + "fields": {"First Name": "Alice"}, + "commentCount": 5, + }, + { + "id": "rec2", + "createdTime": NOW, + "fields": {"First Name": "Bob"}, + "commentCount": 0, + }, + ] + contacts = Contact.all(count_comments=True) + + # Verify count_comments was passed to Table.all() + 
m_all.assert_called_once() + assert m_all.call_args.kwargs.get("count_comments") is True + + # Verify comment_count is populated on instances + assert len(contacts) == 2 + assert contacts[0].comment_count == 5 + assert contacts[1].comment_count == 0 + + +def test_first_with_comment_count(): + with mock.patch.object(Table, "first") as m_first: + m_first.return_value = { + "id": "rec1", + "createdTime": NOW, + "fields": {"First Name": "Alice"}, + "commentCount": 3, + } + contact = Contact.first(count_comments=True) + + # Verify count_comments was passed to Table.first() + m_first.assert_called_once() + assert m_first.call_args.kwargs.get("count_comments") is True + + # Verify comment_count is populated + assert contact.comment_count == 3 + + def test_from_record(): # Fetch = True with mock.patch.object(Table, "get") as m_get: m_get.return_value = { "id": "recwnBLPIeQJoYVt4", - "createdTime": "", - "fields": {"First Name": "X", "Created At": "2014-09-05T12:34:56.000Z"}, + "createdTime": NOW, + "fields": { + "First Name": "X", + "Birthday": None, + "Created At": "2014-09-05T12:34:56.000Z", + }, } contact = Contact.from_id("recwnBLPIeQJoYVt4") @@ -130,35 +198,56 @@ def test_from_record(): assert not contact.first_name == "X" -def test_readonly_field_not_saved(): +def test_unmodified_field_not_saved(contact_record): """ - Test that we do not attempt to save readonly fields to the API, - but we can retrieve readonly fields and set them on instantiation. + Test that we do not attempt to save fields to the API if they are unchanged. 
""" + contact = Contact.from_record(contact_record) + mock_update_contact = partial( + mock.patch.object, Table, "update", return_value=contact_record + ) - record = { - "id": "recwnBLPIeQJoYVt4", - "createdTime": datetime.utcnow().isoformat(), - "fields": { - "Birthday": "1970-01-01", - "Age": 57, - }, - } + # Do not call update() if the record is unchanged + with mock_update_contact() as m_update: + result = contact.save() + assert not (result.created or result.updated) + m_update.assert_not_called() - contact = Contact.from_record(record) - with mock.patch.object(Table, "update") as m_update: - m_update.return_value = record - contact.birthday = datetime(2000, 1, 1) + # By default, only pass fields which were changed to the API + with mock_update_contact() as m_update: + contact.email = "john.doe@example.com" contact.save() + m_update.assert_called_once_with( + contact.id, + {"Email": "john.doe@example.com"}, + typecast=True, + use_field_ids=False, + ) + + # Once saved, the field is no longer marked as changed + with mock_update_contact() as m_update: + contact.save() + m_update.assert_not_called() - # We should not pass 'Age' to the API - m_update.assert_called_once_with( - contact.id, {"Birthday": "2000-01-01"}, typecast=True - ) + # We can explicitly pass all fields to the API + with mock_update_contact() as m_update: + contact.save(force=True) + m_update.assert_called_once_with( + contact.id, + { + "First Name": "John", + "Last Name": "Doe", + "Email": "john.doe@example.com", + "Registered?": True, + "Birthday": "1970-01-01", + }, + typecast=True, + use_field_ids=False, + ) def test_linked_record(): - record = {"id": "recFake", "createdTime": "", "fields": {"Street": "A"}} + record = {"id": "recFake", "createdTime": NOW, "fields": {"Street": "A"}} address = Address.from_id("recFake", fetch=False) # Id Reference @@ -167,7 +256,7 @@ def test_linked_record(): assert not contact.address[0].street with Mocker() as mock: - url = 
Address.get_table().record_url(address.id) + url = Address.meta.table.urls.record(address.id) mock.get(url, status_code=200, json=record) contact.address[0].fetch() @@ -184,13 +273,13 @@ def test_linked_record_can_be_saved(requests_mock, access_linked_records): record IDs into instances of the model. This could interfere with save(), so this test ensures we don't regress the capability. """ - address_json = fake_record(Number="123", Street="Fake St") + address_json = fake_record(Number=123, Street="Fake St") address_id = address_json["id"] - address_url_re = re.escape(Address.get_table().url + "?filterByFormula=") + address_url_re = re.escape(Address.meta.table.urls.records + "?filterByFormula=") contact_json = fake_record(Email="alice@example.com", Link=[address_id]) contact_id = contact_json["id"] - contact_url = Contact.get_table().record_url(contact_id) - contact_url_re = re.escape(Contact.get_table().url + "?filterByFormula=") + contact_url = Contact.meta.table.urls.record(contact_id) + contact_url_re = re.escape(Contact.meta.table.urls.records + "?filterByFormula=") requests_mock.get(re.compile(address_url_re), json={"records": [address_json]}) requests_mock.get(re.compile(contact_url_re), json={"records": [contact_json]}) requests_mock.get(contact_url, json=contact_json) @@ -201,13 +290,14 @@ def test_linked_record_can_be_saved(requests_mock, access_linked_records): if access_linked_records: assert contact.address[0].id == address_id - contact.save() + contact.save(force=True) assert mock_save.last_request.json() == { "fields": { "Email": "alice@example.com", "Link": [address_id], }, "typecast": True, + "returnFieldsByFieldId": False, } @@ -241,26 +331,26 @@ def test_undeclared_field(requests_mock, test_case): """ record = fake_record( - Number="123", + Number=123, Street="Fake St", City="Springfield", State="IL", ) requests_mock.get( - Address.get_table().url, + Address.meta.table.urls.records, status_code=200, json={"records": [record]}, ) 
requests_mock.get( - Address.get_table().record_url(record["id"]), + Address.meta.table.urls.record(record["id"]), status_code=200, json=record, ) _, get_model_instance = test_case instance = get_model_instance(Address, record["id"]) - assert instance.to_record()["fields"] == {"Number": "123", "Street": "Fake St"} + assert instance.to_record()["fields"] == {"Number": 123, "Street": "Fake St"} @mock.patch("pyairtable.Table.batch_create") @@ -270,19 +360,19 @@ def test_batch_save(mock_update, mock_create): Test that we can pass multiple unsaved Model instances (or dicts) to batch_save and it will create or update them all in as few requests as possible. """ - addr1 = Address(number="123", street="Fake St") - addr2 = Address(number="456", street="Fake St") + addr1 = Address(number=123, street="Fake St") + addr2 = Address(number=456, street="Fake St") addr3 = Address.from_record( { "id": "recExistingRecord", - "createdTime": datetime.utcnow().isoformat(), - "fields": {"Number": "789", "Street": "Fake St"}, + "createdTime": datetime.now(timezone.utc).isoformat(), + "fields": {"Number": 789, "Street": "Fake St"}, } ) mock_create.return_value = [ - fake_record(id="abc", Number="123", Street="Fake St"), - fake_record(id="def", Number="456", Street="Fake St"), + fake_record(id="abc", Number=123, Street="Fake St"), + fake_record(id="def", Number=456, Street="Fake St"), ] # Just like model.save(), Model.batch_save() will set IDs on new records. 
@@ -293,22 +383,42 @@ def test_batch_save(mock_update, mock_create): mock_create.assert_called_once_with( [ - {"Number": "123", "Street": "Fake St"}, - {"Number": "456", "Street": "Fake St"}, + {"Number": 123, "Street": "Fake St"}, + {"Number": 456, "Street": "Fake St"}, ], typecast=True, + use_field_ids=False, ) mock_update.assert_called_once_with( [ { "id": "recExistingRecord", - "fields": {"Number": "789", "Street": "Fake St"}, + "fields": {"Number": 789, "Street": "Fake St"}, }, ], typecast=True, + use_field_ids=False, ) +@mock.patch("pyairtable.Table.batch_create") +@mock.patch("pyairtable.Table.batch_update") +def test_batch_save__only_create(mock_update, mock_create): + Address.batch_save([Address(), Address()]) + assert mock_create.call_count == 1 + assert mock_update.call_count == 0 + + +@mock.patch("pyairtable.Table.batch_create") +@mock.patch("pyairtable.Table.batch_update") +def test_batch_save__only_update(mock_update, mock_create): + a1 = Address.from_record(fake_record()) + a2 = Address.from_record(fake_record()) + Address.batch_save([a1, a2]) + assert mock_create.call_count == 0 + assert mock_update.call_count == 1 + + @mock.patch("pyairtable.Table.batch_create") @mock.patch("pyairtable.Table.batch_update") def test_batch_save__invalid_class(mock_update, mock_create): @@ -361,8 +471,8 @@ def test_batch_delete__unsaved_record(mock_delete): receives any models which have not been created yet. 
""" addresses = [ - Address.from_record(fake_record(Number="1", Street="Fake St")), - Address(number="2", street="Fake St"), + Address.from_record(fake_record(Number=1, Street="Fake St")), + Address(number=2, street="Fake St"), ] with pytest.raises(ValueError): Address.batch_delete(addresses) diff --git a/tests/test_orm_fields.py b/tests/test_orm_fields.py index 7d3763a0..4b0f1f1e 100644 --- a/tests/test_orm_fields.py +++ b/tests/test_orm_fields.py @@ -1,10 +1,16 @@ import datetime import operator import re +from unittest import mock import pytest +from requests_mock import NoMockAddress +import pyairtable.exceptions +from pyairtable.formulas import OR, RECORD_ID +from pyairtable.models import schema from pyairtable.orm import fields as f +from pyairtable.orm.lists import AttachmentsList from pyairtable.orm.model import Model from pyairtable.testing import ( fake_attachment, @@ -13,11 +19,23 @@ fake_record, fake_user, ) +from pyairtable.utils import datetime_to_iso_str + +try: + from pytest import Mark as _PytestMark +except ImportError: + # older versions of pytest don't expose pytest.Mark directly + from _pytest.mark import Mark as _PytestMark + DATE_S = "2023-01-01" DATE_V = datetime.date(2023, 1, 1) DATETIME_S = "2023-04-12T09:30:00.000Z" -DATETIME_V = datetime.datetime(2023, 4, 12, 9, 30, 0) +DATETIME_V = datetime.datetime(2023, 4, 12, 9, 30, 0, tzinfo=datetime.timezone.utc) + + +class Dummy(Model): + Meta = fake_meta() def test_field(): @@ -33,6 +51,16 @@ class T: del t.name +def test_description(): + class T: + name = f.Field("Name") + + T.other = f.Field("Other") + + assert T.name._description == "T.name" + assert T.other._description == "'Other' field" + + @pytest.mark.parametrize( "instance,expected", [ @@ -60,6 +88,12 @@ class T: f.LinkField("Records", type("TestModel", (Model,), {"Meta": fake_meta()})), "LinkField('Records', model=, validate_type=True, readonly=False, lazy=False)", ), + ( + f.SingleLinkField( + "Records", type("TestModel", (Model,), 
{"Meta": fake_meta()}) + ), + "SingleLinkField('Records', model=, validate_type=True, readonly=False, lazy=False, raise_if_many=False)", + ), ], ) def test_repr(instance, expected): @@ -71,6 +105,7 @@ def test_repr(instance, expected): argvalues=[ (f.Field, None), (f.CheckboxField, False), + (f.TextField, ""), (f.LookupField, []), (f.AttachmentsField, []), (f.MultipleCollaboratorsField, []), @@ -90,10 +125,16 @@ class T(Model): t = T() assert t.the_field == default_value + t = T.from_record(fake_record({"Field Name": None})) + assert t.the_field == default_value + # Mapping from types to a test value for that type. TYPE_VALIDATION_TEST_VALUES = { - **{t: t() for t in (str, bool, list, dict)}, + str: "some value", + bool: False, + list: [], + dict: {}, int: 1, # cannot use int() because RatingField requires value >= 1 float: 1.0, # cannot use float() because RatingField requires value >= 1 datetime.date: datetime.date.today(), @@ -106,28 +147,48 @@ class T(Model): "test_case", [ (f.Field, tuple(TYPE_VALIDATION_TEST_VALUES)), - (f.TextField, str), - (f.IntegerField, int), - (f.RichTextField, str), - (f.DatetimeField, datetime.datetime), - (f.TextField, str), - (f.CheckboxField, bool), + (f.AttachmentsField, list), (f.BarcodeField, dict), - (f.NumberField, (int, float)), - (f.PhoneNumberField, str), + (f.CheckboxField, bool), + (f.CollaboratorField, dict), + (f.CurrencyField, (int, float)), + (f.DateField, (datetime.date, datetime.datetime)), + (f.DatetimeField, datetime.datetime), (f.DurationField, datetime.timedelta), - (f.RatingField, int), - (f.UrlField, str), + (f.EmailField, str), + (f.FloatField, float), + (f.IntegerField, int), + (f.MultilineTextField, str), + (f.MultipleCollaboratorsField, list), (f.MultipleSelectField, list), + (f.NumberField, (int, float)), (f.PercentField, (int, float)), - (f.DateField, (datetime.date, datetime.datetime)), - (f.FloatField, float), - (f.CollaboratorField, dict), + (f.PhoneNumberField, str), + (f.RatingField, int), + 
(f.RichTextField, str), (f.SelectField, str), - (f.EmailField, str), - (f.AttachmentsField, list), - (f.MultipleCollaboratorsField, list), - (f.CurrencyField, (int, float)), + (f.SingleLineTextField, str), + (f.TextField, str), + (f.UrlField, str), + (f.RequiredBarcodeField, dict), + (f.RequiredCollaboratorField, dict), + (f.RequiredCurrencyField, (int, float)), + (f.RequiredDateField, (datetime.date, datetime.datetime)), + (f.RequiredDatetimeField, datetime.datetime), + (f.RequiredDurationField, datetime.timedelta), + (f.RequiredEmailField, str), + (f.RequiredFloatField, float), + (f.RequiredIntegerField, int), + (f.RequiredMultilineTextField, str), + (f.RequiredNumberField, (int, float)), + (f.RequiredPercentField, (int, float)), + (f.RequiredPhoneNumberField, str), + (f.RequiredRatingField, int), + (f.RequiredRichTextField, str), + (f.RequiredSelectField, str), + (f.RequiredSingleLineTextField, str), + (f.RequiredTextField, str), + (f.RequiredUrlField, str), ], ids=operator.itemgetter(0), ) @@ -207,16 +268,21 @@ class Container(Model): argnames="test_case", argvalues=[ # If a 2-tuple, the API and ORM values should be identical. + (f.AITextField, {"state": "empty", "isStale": True, "value": None}), (f.AutoNumberField, 1), (f.CountField, 1), (f.ExternalSyncSourceField, "Source"), (f.ButtonField, {"label": "Click me!"}), (f.LookupField, ["any", "values"]), - # If a 3-tuple, we should be able to convert API -> ORM values. (f.CreatedByField, fake_user()), - (f.CreatedTimeField, DATETIME_S, DATETIME_V), (f.LastModifiedByField, fake_user()), + (f.ManualSortField, "fcca"), + # If a 3-tuple, we should be able to convert API -> ORM values. 
+ (f.CreatedTimeField, DATETIME_S, DATETIME_V), (f.LastModifiedTimeField, DATETIME_S, DATETIME_V), + # We also want to test the not-null versions of these fields + (f.RequiredAITextField, {"state": "empty", "isStale": True, "value": None}), + (f.RequiredCountField, 1), ], ids=operator.itemgetter(0), ) @@ -249,6 +315,8 @@ class T(Model): # If a 2-tuple, the API and ORM values should be identical. (f.Field, object()), # accepts any value, but Airtable API *will* complain (f.TextField, "name"), + (f.SingleLineTextField, "name"), + (f.MultilineTextField, "some\nthing\nbig"), (f.EmailField, "x@y.com"), (f.NumberField, 1), (f.NumberField, 1.5), @@ -265,12 +333,33 @@ class T(Model): (f.PercentField, 0.5), (f.PhoneNumberField, "+49 40-349180"), (f.RichTextField, "Check out [Airtable](www.airtable.com)"), + (f.SelectField, ""), (f.SelectField, "any value"), (f.UrlField, "www.airtable.com"), + (f.RequiredNumberField, 1), + (f.RequiredNumberField, 1.5), + (f.RequiredIntegerField, 1), + (f.RequiredFloatField, 1.5), + (f.RequiredRatingField, 1), + (f.RequiredCurrencyField, 1.05), + (f.RequiredCollaboratorField, {"id": "usrFakeUserId", "email": "x@y.com"}), + (f.RequiredBarcodeField, {"type": "upce", "text": "084114125538"}), + (f.RequiredPercentField, 0.5), + (f.RequiredSelectField, "any value"), + (f.RequiredEmailField, "any value"), + (f.RequiredPhoneNumberField, "any value"), + (f.RequiredRichTextField, "any value"), + (f.RequiredTextField, "any value"), + (f.RequiredSingleLineTextField, "any value"), + (f.RequiredMultilineTextField, "any value"), + (f.RequiredUrlField, "any value"), # If a 3-tuple, we should be able to convert API -> ORM values. 
(f.DateField, DATE_S, DATE_V), - (f.DurationField, 100.5, datetime.timedelta(seconds=100, microseconds=500000)), (f.DatetimeField, DATETIME_S, DATETIME_V), + (f.DurationField, 100.5, datetime.timedelta(seconds=100, microseconds=500000)), + (f.RequiredDateField, DATE_S, DATE_V), + (f.RequiredDatetimeField, DATETIME_S, DATETIME_V), + (f.RequiredDurationField, 100, datetime.timedelta(seconds=100)), ], ids=operator.itemgetter(0), ) @@ -302,18 +391,125 @@ class T(Model): assert existing_obj.the_field == orm_value +@pytest.mark.parametrize( + "field_type", + [ + f.Field, + f.AITextField, + f.AttachmentsField, + f.BarcodeField, + f.CheckboxField, + f.CollaboratorField, + f.CountField, + f.CurrencyField, + f.DateField, + f.DatetimeField, + f.DurationField, + f.EmailField, + f.ExternalSyncSourceField, + f.FloatField, + f.IntegerField, + f.LastModifiedByField, + f.LastModifiedTimeField, + f.LookupField, + f.ManualSortField, + f.MultilineTextField, + f.MultipleCollaboratorsField, + f.MultipleSelectField, + f.NumberField, + f.NumberField, + f.PercentField, + f.PhoneNumberField, + f.RatingField, + f.RichTextField, + f.SelectField, + f.SingleLineTextField, + f.TextField, + f.UrlField, + ], +) +def test_accepts_null(field_type): + """ + Test field types that allow null values from Airtable. 
+ """ + + class T(Model): + Meta = fake_meta() + the_field = field_type("Field Name") + + obj = T() + assert not obj.the_field + + +@pytest.mark.parametrize( + "field_type", + [ + f.AutoNumberField, + f.ButtonField, + f.CreatedByField, + f.CreatedTimeField, + f.RequiredAITextField, + f.RequiredBarcodeField, + f.RequiredCollaboratorField, + f.RequiredCountField, + f.RequiredCurrencyField, + f.RequiredDateField, + f.RequiredDatetimeField, + f.RequiredDurationField, + f.RequiredEmailField, + f.RequiredFloatField, + f.RequiredIntegerField, + f.RequiredMultilineTextField, + f.RequiredNumberField, + f.RequiredPercentField, + f.RequiredPhoneNumberField, + f.RequiredRatingField, + f.RequiredRichTextField, + f.RequiredSelectField, + f.RequiredSingleLineTextField, + f.RequiredTextField, + f.RequiredUrlField, + ], +) +def test_rejects_null(field_type): + """ + Test field types that do not allow null values from Airtable. + """ + + class T(Model): + Meta = fake_meta() + the_field = field_type("Field Name") + + obj = T() + with pytest.raises(pyairtable.exceptions.MissingValueError): + obj.the_field + with pytest.raises(pyairtable.exceptions.MissingValueError): + obj.the_field = None + with pytest.raises(pyairtable.exceptions.MissingValueError): + T(the_field=None) + + def test_completeness(): """ Ensure that we test conversion of all readonly and writable fields. 
""" - assert_all_fields_tested_by(test_writable_fields, test_readonly_fields) + assert_all_fields_tested_by( + test_writable_fields, + test_readonly_fields, + exclude=(f.LinkField, f.SingleLinkField), + ) assert_all_fields_tested_by( test_type_validation, - exclude=f.READONLY_FIELDS | {f.LinkField}, + exclude=f.READONLY_FIELDS | {f.LinkField, f.SingleLinkField}, + ) + assert_all_fields_tested_by( + test_accepts_null, + test_rejects_null, + exclude={f.LinkField, f.SingleLinkField}, ) -def assert_all_fields_tested_by(*test_fns, exclude=(f.Field, f.LinkField)): +def assert_all_fields_tested_by(*test_fns, exclude=()): """ Allows meta-tests that fail if any new Field classes appear in pyairtable.orm.fields which are not covered by one of a few basic tests. This is intended to help remind @@ -321,7 +517,7 @@ def assert_all_fields_tested_by(*test_fns, exclude=(f.Field, f.LinkField)): """ def extract_fields(obj): - if isinstance(obj, pytest.Mark): + if isinstance(obj, _PytestMark): yield from [*extract_fields(obj.args), *extract_fields(obj.kwargs)] elif isinstance(obj, str): pass @@ -338,7 +534,7 @@ def extract_fields(obj): field_class for test_function in test_fns for pytestmark in getattr(test_function, "pytestmark", []) - if isinstance(pytestmark, pytest.Mark) and pytestmark.name == "parametrize" + if isinstance(pytestmark, _PytestMark) and pytestmark.name == "parametrize" for field_class in extract_fields(pytestmark) if field_class not in exclude } @@ -386,18 +582,28 @@ class T(Model): assert T.from_record(fake_record(Fld=None)).the_field == [] -def test_list_field_with_invalid_type(): +@pytest.mark.parametrize( + "field_class,invalid_value", + [ + (f._ListField, object()), + (f.AttachmentsField, [1, 2, 3]), + (f.MultipleCollaboratorsField, [1, 2, 3]), + (f.MultipleSelectField, [{"complex": "type"}]), + ], +) +def test_list_field_with_invalid_type(field_class, invalid_value): """ - Ensure that a ListField represents a null value as an empty list. 
+ Ensure that a ListField raises TypeError when given a non-list, + or a list of objects that don't match `contains_type`. """ class T(Model): Meta = fake_meta() - the_field = f._ListField("Field Name", str) + the_field = field_class("Field Name", str) obj = T.from_record(fake_record()) with pytest.raises(TypeError): - obj.the_field = object() + obj.the_field = invalid_value def test_list_field_with_string(): @@ -414,12 +620,13 @@ class T: t.items = "hello!" -def test_link_field_must_link_to_model(): +@pytest.mark.parametrize("cls", (f.LinkField, f.SingleLinkField)) +def test_link_field_must_link_to_model(cls): """ Tests that a LinkField cannot link to an arbitrary type. """ with pytest.raises(TypeError): - f.LinkField("Field Name", model=dict) + cls("Field Name", model=dict) def test_link_field(): @@ -437,6 +644,8 @@ class Author(Model): collection = [Book(), Book(), Book()] author = Author() author.books = collection + assert isinstance(author._fields["Books"], f.ChangeTrackingList) + assert author.books == collection with pytest.raises(TypeError): @@ -449,10 +658,6 @@ class Author(Model): author.books = -1 -class Dummy(Model): - Meta = fake_meta() - - def test_link_field__linked_model(): """ Test the various ways of specifying a linked model for the LinkField. 
@@ -506,15 +711,12 @@ def test_link_field__cycle(requests_mock): rec_b = {"id": id_b, "createdTime": DATETIME_S, "fields": {"Friends": [id_c]}} rec_c = {"id": id_c, "createdTime": DATETIME_S, "fields": {"Friends": [id_a]}} - requests_mock.get(Person.get_table().record_url(id_a), json=rec_a) + requests_mock.get(Person.meta.table.urls.record(id_a), json=rec_a) a = Person.from_id(id_a) + url = Person.meta.table.urls.records for record in (rec_a, rec_b, rec_c): - url_re = re.compile( - re.escape(Person.get_table().url + "?filterByFormula=") - + ".*" - + record["id"] - ) + url_re = re.compile(re.escape(f"{url}?filterByFormula=") + ".*" + record["id"]) requests_mock.get(url_re, json={"records": [record]}) assert a.friends[0].id == id_b @@ -529,7 +731,7 @@ def test_link_field__load_many(requests_mock): """ person_id = fake_id("rec", "A") - person_url = Person.get_table().record_url(person_id) + person_url = Person.meta.table.urls.record(person_id) friend_ids = [fake_id("rec", c) for c in "123456789ABCDEF"] person_json = { @@ -551,7 +753,7 @@ def test_link_field__load_many(requests_mock): # The mocked URL specifically includes every record ID in our test set, # to ensure the library isn't somehow dropping records from its query. 
url_regex = ".*".join( - [re.escape(Person.get_table().url + "?filterByFormula="), *friend_ids] + [re.escape(Person.meta.table.urls.records + "?filterByFormula="), *friend_ids] ) mock_list = requests_mock.get( re.compile(url_regex), @@ -568,6 +770,258 @@ def test_link_field__load_many(requests_mock): assert mock_list.call_count == 2 +@pytest.mark.parametrize( + "mutation", + ( + "author.books = [book]", + "author.books.append(book)", + "author.books[0] = book", + "author.books.insert(0, book)", + "author.books[0:1] = []", + "author.books.pop(0)", + "del author.books[0]", + "author.books.remove(author.books[0])", + "author.books.clear()", + "author.books.extend([book])", + ), +) +def test_link_field__save(requests_mock, mutation): + """ + Test that we correctly detect changes to linked fields and save them. + """ + + class Book(Model): + Meta = fake_meta() + + class Author(Model): + Meta = fake_meta() + books = f.LinkField("Books", model=Book) + + b1 = Book.from_record(fake_record()) + b2 = Book.from_record(fake_record()) + author = Author.from_record(fake_record({"Books": [b1.id]})) + + def _cb(request, context): + return { + "id": author.id, + "createdTime": datetime_to_iso_str(author.created_time), + "fields": request.json()["fields"], + } + + requests_mock.get( + Book.meta.table.urls.records, + json={"records": [b1.to_record(), b2.to_record()]}, + ) + m = requests_mock.patch(Author.meta.table.urls.record(author.id), json=_cb) + exec(mutation, {}, {"author": author, "book": b2}) + assert author._changed["Books"] + author.save() + assert m.call_count == 1 + assert "Books" in m.last_request.json()["fields"] + + +def test_single_link_field(): + class Author(Model): + Meta = fake_meta() + name = f.TextField("Name") + + class Book(Model): + Meta = fake_meta() + author = f.SingleLinkField("Author", Author, lazy=True) + + assert Book.author.linked_model is Author + + book = Book() + assert book.author is None + + with pytest.raises(TypeError): + book.author = [Author()] 
+ + with pytest.raises(TypeError): + book.author = [] + + alice = Author.from_record(fake_record(Name="Alice")) + book.author = alice + + with mock.patch("pyairtable.Table.get", return_value=alice.to_record()) as m: + book.author.fetch() + m.assert_called_once_with(alice.id, **Author.meta.request_kwargs) + + assert book.author.id == alice.id + assert book.author.name == "Alice" + + book.author = (bob := Author(name="Bob")) + assert not book.author.exists() + assert book.author.name == "Bob" + + with mock.patch("pyairtable.Table.create", return_value=fake_record()) as m: + book.author.save() + m.assert_called_once_with( + {"Name": "Bob"}, + typecast=True, + use_field_ids=False, + ) + + with mock.patch("pyairtable.Table.create", return_value=fake_record()) as m: + book.save() + m.assert_called_once_with( + {"Author": [bob.id]}, + typecast=True, + use_field_ids=False, + ) + + with mock.patch("pyairtable.Table.update", return_value=book.to_record()) as m: + book.author = None + book.save() + m.assert_called_once_with( + book.id, + {"Author": None}, + typecast=True, + use_field_ids=False, + ) + + +def test_single_link_field__multiple_values(): + """ + Test the behavior of SingleLinkField when the Airtable API + returns multiple values. + """ + + class Author(Model): + Meta = fake_meta() + name = f.TextField("Name") + + class Book(Model): + Meta = fake_meta() + author = f.SingleLinkField("Author", Author) + + records = [fake_record(Name=f"Author {n+1}") for n in range(3)] + a1, a2, a3 = [r["id"] for r in records] + + # if Airtable sends back multiple IDs, we'll only retrieve the first one. 
+ book = Book.from_record(fake_record(Author=[a1, a2, a3])) + with mock.patch("pyairtable.Table.all", return_value=records) as m: + book.author + m.assert_called_once_with( + **Book.meta.request_kwargs, + formula=OR(RECORD_ID().eq(records[0]["id"])), + ) + + assert book.author.id == a1 + assert book.author.name == "Author 1" + assert book._fields["Author"][1:] == [a2, a3] # not converted to models + + # if book.author.__set__ not called, the entire list will be sent back to the API + with mock.patch("pyairtable.Table.update", return_value=book.to_record()) as m: + book.save(force=True) + m.assert_called_once_with( + book.id, + {"Author": [a1, a2, a3]}, + typecast=True, + use_field_ids=False, + ) + + # if we modify the field value, it will drop items 2-N + book.author = Author.from_record(fake_record()) + with mock.patch("pyairtable.Table.update", return_value=book.to_record()) as m: + book.save() + m.assert_called_once_with( + book.id, + {"Author": [book.author.id]}, + typecast=True, + use_field_ids=False, + ) + + +def test_single_link_field__raise_if_many(): + """ + Test that passing raise_if_many=True to SingleLinkField will cause an exception + to be raised if (1) the field receives multiple values and (2) is accessed. + """ + + class Author(Model): + Meta = fake_meta() + name = f.TextField("Name") + + class Book(Model): + Meta = fake_meta() + author = f.SingleLinkField("Author", Author, raise_if_many=True) + + book = Book.from_record(fake_record(Author=[fake_id(), fake_id()])) + with pytest.raises(pyairtable.exceptions.MultipleValuesError): + book.author + + +@pytest.mark.parametrize("field_type", (f.LinkField, f.SingleLinkField)) +def test_link_field__populate(field_type, requests_mock): + """ + Test that implementers can use Model.link_field.populate(instance) to control + whether loading happens lazy or non-lazy at runtime. 
+ """ + + class Linked(Model): + Meta = fake_meta() + name = f.TextField("Name") + + class T(Model): + Meta = fake_meta() + link = field_type("Link", Linked) + + links = [fake_record(id=n, Name=f"link{n}") for n in range(1, 4)] + link_ids = [link["id"] for link in links] + obj = T.from_record(fake_record(Link=link_ids[:])) + assert obj._fields.get("Link") == link_ids + assert obj._fields.get("Link") is not link_ids + + # calling the record directly will attempt network traffic + with pytest.raises(NoMockAddress): + obj.link + + # on a non-lazy field, we can still call .populate() to load it lazily + T.link.populate(obj, lazy=True) + + if field_type is f.SingleLinkField: + assert isinstance(obj.link, Linked) + assert obj.link.id == links[0]["id"] + assert obj.link.name == "" + else: + assert isinstance(obj.link[0], Linked) + assert link_ids == [link.id for link in obj.link] + assert all(link.name == "" for link in obj.link) + + # calling .populate() on the wrong model raises an exception + with pytest.raises(RuntimeError): + T.link.populate(Linked()) + + +@pytest.mark.parametrize("field_type", (f.LinkField, f.SingleLinkField)) +def test_link_field__populate_with_field_ids(field_type, requests_mock): + """ + Test that implementers can use Model.link_field.populate(instance) + when the linked model uses field IDs rather than field names. 
+ """ + field_id = fake_id("fld") + record_ids = [fake_id("rec", n) for n in range(3)] + records = [ + fake_record(id=record_id, Name=f"link{n}") + for n, record_id in enumerate(record_ids) + ] + + class Linked(Model): + Meta = fake_meta(use_field_ids=True) + name = f.TextField(field_id) + + class T(Model): + Meta = fake_meta() + link = field_type("Link", Linked) + + m = requests_mock.get(Linked.meta.table.urls.records, json={"records": records}) + obj = T.from_record(fake_record(Link=record_ids)) + obj.link + assert m.call_count == 1 + assert m.last_request.qs.get("returnFieldsByFieldId") == ["1"] + + def test_lookup_field(): class T: items = f.LookupField("Items") @@ -613,3 +1067,174 @@ class T: with pytest.raises(ValueError): T().rating = 0 + + +def test_datetime_timezones(requests_mock): + """ + Test that DatetimeField handles time zones properly. + """ + + class M(Model): + Meta = fake_meta() + dt = f.DatetimeField("dt") + + obj = M.from_record(fake_record(dt="2024-02-29T12:34:56Z")) + + def patch_callback(request, context): + return { + "id": obj.id, + "createdTime": datetime_to_iso_str(obj.created_time), + "fields": request.json()["fields"], + } + + m = requests_mock.patch(M.meta.table.urls.record(obj.id), json=patch_callback) + + # Test that we parse the "Z" into UTC correctly + assert obj.dt.date() == datetime.date(2024, 2, 29) + assert obj.dt.tzinfo is datetime.timezone.utc + obj.save(force=True) + assert m.last_request.json()["fields"]["dt"] == "2024-02-29T12:34:56.000Z" + + # Test that we can set a UTC timezone and it will be saved as-is. + obj.dt = datetime.datetime(2024, 3, 1, 11, 22, 33, tzinfo=datetime.timezone.utc) + obj.save() + assert m.last_request.json()["fields"]["dt"] == "2024-03-01T11:22:33.000Z" + + # Test that we can set a local timezone and it will be sent to Airtable. 
+ pacific = datetime.timezone(datetime.timedelta(hours=-8)) + obj.dt = datetime.datetime(2024, 3, 1, 11, 22, 33, tzinfo=pacific) + obj.save() + assert m.last_request.json()["fields"]["dt"] == "2024-03-01T11:22:33.000-08:00" + + # Test that a timezone-unaware datetime is passed as-is to Airtable. + # This behavior will vary depending on how the field is configured. + # See https://airtable.com/developers/web/api/field-model#dateandtime + obj.dt = datetime.datetime(2024, 3, 1, 11, 22, 33) + obj.save() + assert m.last_request.json()["fields"]["dt"] == "2024-03-01T11:22:33.000" + + +@pytest.mark.parametrize( + "fields,expected", + [ + ({}, None), + ({"Field": None}, None), + ({"Field": ""}, ""), + ({"Field": "xyz"}, "xyz"), + ], +) +def test_select_field(fields, expected): + """ + Test that select field distinguishes between empty string and None. + """ + + class T(Model): + Meta = fake_meta() + the_field = f.SelectField("Field") + + obj = T.from_record(fake_record(**fields)) + assert obj.the_field == expected + + with mock.patch("pyairtable.Table.update", return_value=obj.to_record()) as m: + obj.save(force=True) + m.assert_called_once_with( + obj.id, + fields, + typecast=True, + use_field_ids=False, + ) + + +@pytest.mark.parametrize( + "class_kwargs", + [ + {"contains_type": 1}, + {"list_class": 1}, + {"list_class": dict}, + ], +) +def test_invalid_list_class_params(class_kwargs): + """ + Test that certain parameters to ListField are invalid. + """ + + with pytest.raises(TypeError): + + class ListFieldSubclass(f._ListField, **class_kwargs): + pass + + +@mock.patch("pyairtable.Table.create") +def test_attachments__set(mock_create): + """ + Test that AttachmentsField can be set with a list of AttachmentDict, + and the value will be coerced to an AttachmentsList. 
+ """ + mock_create.return_value = { + "id": fake_id(), + "createdTime": DATETIME_S, + "fields": { + "Attachments": [ + { + "id": fake_id("att"), + "url": "https://example.com", + "filename": "a.jpg", + } + ] + }, + } + + class T(Model): + Meta = fake_meta() + attachments = f.AttachmentsField("Attachments") + + obj = T() + assert obj.attachments == [] + assert isinstance(obj.attachments, AttachmentsList) + + obj.attachments = [{"url": "https://example.com"}] + assert isinstance(obj.attachments, AttachmentsList) + + obj.save() + assert isinstance(obj.attachments, AttachmentsList) + assert obj.attachments[0]["url"] == "https://example.com" + + +def test_attachments__set_invalid_type(): + class T(Model): + Meta = fake_meta() + attachments = f.AttachmentsField("Attachments") + + with pytest.raises(TypeError): + T().attachments = [1, 2, 3] + + +def test_field_schema(table, mock_table_schema): + """ + Test that an ORM field can retrieve its own field schema. + """ + + class Apartment(Model): + class Meta: + api_key = fake_id("pat") + base_id = table.base.id + table_name = "Apartments" + + name = f.TextField("Name") + pictures = f.AttachmentsField("Pictures") + + name = Apartment.name.field_schema() + assert isinstance(name, schema.SingleLineTextFieldSchema) + assert name.id == "fld1VnoyuotSTyxW1" + + pictures = Apartment.pictures.field_schema() + assert isinstance(pictures, schema.MultipleAttachmentsFieldSchema) + assert pictures.id == "fldoaIqdn5szURHpw" + assert pictures.options.is_reversed is False + + +def test_field_schema__detached(table, requests_mock): + with pytest.raises(RuntimeError): + f.TextField("Detached Field").field_schema() + with pytest.raises(RuntimeError): + f._FieldSchema().field_schema() diff --git a/tests/test_orm_generate.py b/tests/test_orm_generate.py new file mode 100644 index 00000000..6692516e --- /dev/null +++ b/tests/test_orm_generate.py @@ -0,0 +1,196 @@ +import pytest + +from pyairtable.models import schema +from pyairtable.orm import 
generate +from pyairtable.testing import fake_id + + +@pytest.mark.parametrize( + "value,expected", + [ + ("Apartments", "Apartment"), + ("Apartment", "Apartment"), + ("Ice Cold Slushees", "IceColdSlushee"), + ("Table 5.6", "Table5_6"), + ("53rd Avenue", "_53rdAvenue"), + ("(53rd Avenue)", "_53rdAvenue"), + ], +) +def test_table_class_name(value, expected): + assert generate.table_class_name(value) == expected + + +@pytest.mark.parametrize( + "value,expected", + [ + ("Apartments", "apartments"), + ("Apartment", "apartment"), + ("Ice Cold Slushees", "ice_cold_slushees"), + ("Checked?", "checked"), + ("Is checked?", "is_checked"), + ("* Something weird (but kinda long!)", "something_weird_but_kinda_long"), + ("Section 5.6", "section_5_6"), + ("53rd Avenue", "_53rd_avenue"), + ("(53rd Avenue)", "_53rd_avenue"), + ], +) +def test_field_variable_name(value, expected): + assert generate.field_variable_name(value) == expected + + +@pytest.mark.parametrize( + "result_schema,expected", + [ + (None, "Any"), + ({"type": "multipleRecordLinks"}, "str"), + ({"type": "singleLineText"}, "str"), + ({"type": "number"}, "Union[int, float]"), + ({"type": "date"}, "date"), + ({"type": "dateTime"}, "datetime"), + ({"type": "rating"}, "int"), + ({"type": "duration"}, "timedelta"), + ({"type": "checkbox"}, "bool"), + ({"type": "multipleAttachments"}, "dict"), + ({"type": "multipleSelects"}, "str"), + ], +) +def test_lookup_field_type_annotation(result_schema, expected): + struct = { + "id": fake_id("fld"), + "name": "Fake Field", + "type": "multipleLookupValues", + "options": {"isValid": True, "result": result_schema}, + } + obj = schema.MultipleLookupValuesFieldSchema.model_validate(struct) + assert generate.lookup_field_type_annotation(obj) == expected + + +@pytest.mark.parametrize( + "schema_data,expected", + [ + # basic field is looked up from the type + ( + {"type": "singleLineText"}, + "field = F.TextField('Field')", + ), + # formula field that's missing result.type gets a generic 
field + ( + {"type": "formula", "options": {"formula": "1", "isValid": True}}, + "field = F.Field('Field', readonly=True)", + ), + # formula field with result.type should look up the right class + ( + { + "type": "formula", + "options": { + "formula": "1", + "isValid": True, + "result": {"type": "multipleAttachments"}, + }, + }, + "field = F.AttachmentsField('Field', readonly=True)", + ), + # lookup field should share more about types + ( + { + "type": "multipleLookupValues", + "options": { + "isValid": True, + "fieldIdInLinkedValue": fake_id("fld"), + "recordLinkFieldId": fake_id("fld"), + "result": {"type": "duration"}, + }, + }, + "field = F.LookupField[timedelta]('Field')", + ), + ], +) +def test_field_builder(schema_data, expected): + schema_data = {"id": fake_id("fld"), "name": "Field", **schema_data} + field_schema = schema.parse_field_schema(schema_data) + builder = generate.FieldBuilder(field_schema, lookup={}) + assert str(builder) == expected + + +def test_generate(base, mock_base_metadata): + builder = generate.ModelFileBuilder(base) + code = str(builder) + assert code == ( + """\ +from __future__ import annotations + +import os +from functools import partial + +from pyairtable.orm import Model +from pyairtable.orm import fields as F + + +class Apartment(Model): + class Meta: + api_key = partial(os.environ.get, 'AIRTABLE_API_KEY') + base_id = 'appLkNDICXNqxSDhG' + table_name = 'Apartments' + + name = F.TextField('Name') + pictures = F.AttachmentsField('Pictures') + district = F.LinkField['District']('District', model='District') + + +class District(Model): + class Meta: + api_key = partial(os.environ.get, 'AIRTABLE_API_KEY') + base_id = 'appLkNDICXNqxSDhG' + table_name = 'Districts' + + name = F.TextField('Name') + apartments = F.LinkField['Apartment']('Apartments', model='Apartment') + + +__all__ = [ + 'Apartment', + 'District', +]""" + ) + + +@pytest.mark.parametrize( + "kwargs", + [ + {"table_names": ["Apartments"]}, + {"table_ids": 
["tbltp8DGLhqbUmjK1"]}, + ], +) +def test_generate__table_names(base, kwargs, mock_base_metadata): + """ + Test that we can generate only some tables, and link fields + will reflect the fact that some tables are not represented. + """ + builder = generate.ModelFileBuilder(base, **kwargs) + code = str(builder) + assert code == ( + """\ +from __future__ import annotations + +import os +from functools import partial + +from pyairtable.orm import Model +from pyairtable.orm import fields as F + + +class Apartment(Model): + class Meta: + api_key = partial(os.environ.get, 'AIRTABLE_API_KEY') + base_id = 'appLkNDICXNqxSDhG' + table_name = 'Apartments' + + name = F.TextField('Name') + pictures = F.AttachmentsField('Pictures') + district = F._ListField[str]('District') + + +__all__ = [ + 'Apartment', +]""" + ) diff --git a/tests/test_orm_lists.py b/tests/test_orm_lists.py new file mode 100644 index 00000000..5700177c --- /dev/null +++ b/tests/test_orm_lists.py @@ -0,0 +1,145 @@ +from datetime import datetime, timezone +from unittest import mock + +import pytest + +from pyairtable.exceptions import ReadonlyFieldError, UnsavedRecordError +from pyairtable.orm import fields as F +from pyairtable.orm.model import Model +from pyairtable.testing import fake_id, fake_meta, fake_record + +NOW = datetime.now(timezone.utc).isoformat() + + +class Fake(Model): + Meta = fake_meta() + attachments = F.AttachmentsField("Files") + readonly_attachments = F.AttachmentsField("Other Files", readonly=True) + others = F.LinkField["Fake"]("Others", F.LinkSelf) + + +@pytest.fixture +def mock_upload(): + response = { + "id": fake_id(), + "createdTime": NOW, + "fields": { + fake_id("fld"): [ + { + "id": fake_id("att"), + "url": "https://example.com/a.txt", + "filename": "a.txt", + "type": "text/plain", + }, + ], + # Test that, if Airtable's API returns multiple fields (for some reason), + # we will only use the first field in the "fields" key (not all of them). 
+ fake_id("fld"): [ + { + "id": fake_id("att"), + "url": "https://example.com/b.png", + "filename": "b.png", + "type": "image/png", + }, + ], + }, + } + with mock.patch("pyairtable.Table.upload_attachment", return_value=response) as m: + yield m + + +@pytest.mark.parametrize("content", [b"Hello, world!", "Hello, world!"]) +def test_attachment_upload(mock_upload, tmp_path, content): + """ + Test that we can add an attachment to a record. + """ + fp = tmp_path / "a.txt" + writer = fp.write_text if isinstance(content, str) else fp.write_bytes + writer(content) + + record = fake_record() + instance = Fake.from_record(record) + instance.attachments.upload(fp) + assert instance.attachments == [ + { + "id": mock.ANY, + "url": "https://example.com/a.txt", + "filename": "a.txt", + "type": "text/plain", + }, + ] + + mock_upload.assert_called_once_with( + record["id"], + "Files", + filename=fp, + content=None, + content_type=None, + ) + + +def test_attachment_upload__readonly(mock_upload): + """ + Test that calling upload() on a readonly field will raise an exception. + """ + record = fake_record() + instance = Fake.from_record(record) + with pytest.raises(ReadonlyFieldError): + instance.readonly_attachments.upload("a.txt", content="Hello, world!") + + +def test_attachment_upload__unsaved_record(mock_upload): + """ + Test that calling upload() on an unsaved record will not call the API + and instead raises an exception. + """ + instance = Fake() + with pytest.raises(UnsavedRecordError): + instance.attachments.upload("a.txt", content=b"Hello, world!") + mock_upload.assert_not_called() + + +def test_attachment_upload__unsaved_value(mock_upload): + """ + Test that calling upload() on an attachment list will clobber + any other unsaved changes made to that field. 
+ + This is not necessarily the most useful side effect, but it's the + only rational way to deal with the fact that Airtable will return + the full field value in its response, with no straightforward way + for us to identify the specific attachment that was uploaded. + """ + instance = Fake.from_record(fake_record()) + unsaved_url = "https://example.com/unsaved.txt" + instance.attachments = [{"url": unsaved_url}] + instance.attachments.upload("b.txt", content="Hello, world!") + mock_upload.assert_called_once() + assert len(instance.attachments) == 1 + assert instance.attachments[0]["url"] != unsaved_url + + +@pytest.mark.parametrize( + "op,retval,new_value", + [ + (mock.call.append(4), None, [1, 2, 3, 4]), + (mock.call.insert(1, 4), None, [1, 4, 2, 3]), + (mock.call.remove(2), None, [1, 3]), + (mock.call.clear(), None, []), + (mock.call.extend([4, 5]), None, [1, 2, 3, 4, 5]), + (mock.call.pop(), 3, [1, 2]), + ], +) +def test_change_tracking_list(op, retval, new_value): + """ + Test that ChangeTrackingList performs operations normally + and records (on the model instance) that the field changed. 
+ """ + instance = Fake.from_record(fake_record()) + ctl = F.ChangeTrackingList[int]([1, 2, 3], field=Fake.others, model=instance) + assert not instance._changed.get("Others") + + fn = getattr(ctl, op._mock_parent._mock_name) + result = fn(*op.args, **op.kwargs) + assert result == retval + assert ctl == new_value + assert instance._changed["Others"] is True diff --git a/tests/test_orm_model.py b/tests/test_orm_model.py index a9d3e38c..6729d8fd 100644 --- a/tests/test_orm_model.py +++ b/tests/test_orm_model.py @@ -1,12 +1,30 @@ +import pickle +from datetime import datetime, timezone from functools import partial from unittest import mock import pytest +from requests_mock import Mocker from pyairtable.orm import Model from pyairtable.orm import fields as f +from pyairtable.orm.model import SaveResult from pyairtable.testing import fake_id, fake_meta, fake_record +NOW = datetime.now(timezone.utc).isoformat() + + +class FakeModel(Model): + Meta = fake_meta() + one = f.TextField("one") + two = f.TextField("two") + + +class FakeModelByIds(Model): + Meta = fake_meta(use_field_ids=True, table_name="Apartments") + name = f.TextField("fld1VnoyuotSTyxW1") + age = f.NumberField("fld2VnoyuotSTy4g6") + @pytest.fixture(autouse=True) def no_requests(requests_mock): @@ -51,6 +69,63 @@ class Address(Model): street = f.TextField("Street") +def test_model_empty_meta_with_callable(): + """ + Test that we throw an exception when a required Meta attribute is + defined as a callable which returns None. + """ + + class Address(Model): + Meta = fake_meta(api_key=lambda: None) + street = f.TextField("Street") + + with mock.patch("pyairtable.Table.first", return_value=fake_record()) as m: + with pytest.raises(ValueError): + Address.first() + m.assert_not_called() + + +def test_model_meta_dict(): + """ + Test that we can define Meta as a dict rather than a class. 
+ """ + + class Address(Model): + Meta = { + "api_key": "fake_api_key", + "base_id": "fake_base_id", + "table_name": "fake_table_name", + "timeout": (1, 1), + "retry": False, + } + + assert Address.meta.api.api_key == "fake_api_key" + + +@pytest.mark.parametrize("invalid_meta", ([1, 2, 3], "invalid", True)) +def test_model_invalid_meta(invalid_meta): + """ + Test that model creation raises a TypeError if Meta is an invalid type. + """ + with pytest.raises(TypeError): + + class Address(Model): + Meta = invalid_meta + + +@pytest.mark.parametrize("meta_kwargs", [{"timeout": 1}, {"retry": "sure"}]) +def test_model_meta_checks_types(meta_kwargs): + """ + Test that accessing meta raises a TypeError if a value is an invalid type. + """ + + class Address(Model): + Meta = fake_meta(**meta_kwargs) + + with pytest.raises(TypeError): + Address.meta.api + + @pytest.mark.parametrize("name", ("exists", "id")) def test_model_overlapping(name): """ @@ -68,16 +143,45 @@ def test_model_overlapping(name): ) -class FakeModel(Model): - Meta = fake_meta() - - def test_repr(): record = fake_record() assert repr(FakeModel.from_record(record)) == f"" assert repr(FakeModel()) == "" +def test_delete(): + obj = FakeModel.from_record(record := fake_record()) + with mock.patch("pyairtable.Table.delete") as mock_delete: + obj.delete() + + mock_delete.assert_called_once_with(record["id"]) + + +def test_delete__unsaved(): + obj = FakeModel() + with pytest.raises(ValueError): + obj.delete() + + +def test_fetch(): + obj = FakeModel(id=fake_id()) + assert not obj.one + assert not obj.two + + with mock.patch("pyairtable.Table.get") as mock_get: + mock_get.return_value = fake_record(one=1, two=2) + obj.fetch() + + assert obj.one == 1 + assert obj.two == 2 + + +def test_fetch__unsaved(): + obj = FakeModel() + with pytest.raises(ValueError): + obj.fetch() + + @pytest.mark.parametrize( "method,args", [ @@ -111,28 +215,178 @@ class Contact(Model): assert contact.name == "Alice" 
-@mock.patch("pyairtable.Table.all") -def test_from_ids(mock_all): +@mock.patch("pyairtable.Table.get") +def test_from_id__use_field_ids(mock_get, fake_records_by_id): + # Use the FakeModelByIds class to test the use_field_ids option. + fake_contact = fake_records_by_id[0] + mock_get.return_value = fake_contact + model = FakeModelByIds.from_id(fake_contact["id"]) + assert model.name == "Alice" + assert mock_get.call_count == 1 + assert mock_get.mock_calls[-1].kwargs["use_field_ids"] is True + + +@mock.patch("pyairtable.Api.iterate_requests") +def test_from_ids(mock_api): fake_records = [fake_record() for _ in range(10)] - mock_all.return_value = fake_records + mock_api.return_value = [{"records": fake_records}] fake_ids = [record["id"] for record in fake_records] contacts = FakeModel.from_ids(fake_ids) - mock_all.assert_called_once() + mock_api.assert_called_once_with( + method="get", + url=FakeModel.meta.table.urls.records, + fallback=("post", FakeModel.meta.table.urls.records_post), + options={ + **FakeModel.meta.request_kwargs, + "formula": ( + "OR(%s)" % ", ".join(f"RECORD_ID()='{id}'" for id in sorted(fake_ids)) + ), + }, + ) assert len(contacts) == len(fake_records) assert {c.id for c in contacts} == {r["id"] for r in fake_records} + +@mock.patch("pyairtable.Table.all") +def test_from_ids__invalid_id(mock_all): # Should raise KeyError because of the invalid ID - mock_all.reset_mock() with pytest.raises(KeyError): - FakeModel.from_ids(fake_ids + ["recDefinitelyNotValid"]) + FakeModel.from_ids(["recDefinitelyNotValid"]) mock_all.assert_called_once() +@mock.patch("pyairtable.Table.all") +def test_from_ids__no_fetch(mock_all): + fake_ids = [fake_id() for _ in range(10)] + contacts = FakeModel.from_ids(fake_ids, fetch=False) + assert mock_all.call_count == 0 + assert len(contacts) == 10 + assert set(contact.id for contact in contacts) == set(fake_ids) + + +@mock.patch("pyairtable.Table.all") +def test_from_ids__use_field_ids(mock_all): + fake_ids = [fake_id() for 
_ in range(10)] + mock_all.return_value = [ + fake_record( + id=record_id, + fld1VnoyuotSTyxW1=f"Name {idx}", + fld2VnoyuotSTy4g6=(idx + 40), + ) + for idx, record_id in enumerate(fake_ids) + ] + models = FakeModelByIds.from_ids(fake_ids) + assert mock_all.call_count == 1 + assert mock_all.mock_calls[-1].kwargs["use_field_ids"] is True + assert models[0].name == "Name 0" + assert models[0].age == 40 + assert models[1].name == "Name 1" + assert models[1].age == 41 + + +@pytest.mark.parametrize( + "methodname,returns", + ( + ("all", [fake_record(), fake_record(), fake_record()]), + ("first", fake_record()), + ), +) +def test_passthrough(methodname, returns): + """ + Test that .all() and .first() pass through whatever they get. + """ + with mock.patch( + f"pyairtable.Table.{methodname}", return_value=returns + ) as mock_endpoint: + method = getattr(FakeModel, methodname) + method(a=1, b=2, c=3) + mock_endpoint.assert_called_once_with( + a=1, + b=2, + c=3, + use_field_ids=getattr(FakeModel.Meta, "use_field_ids", False), + user_locale=None, + time_zone=None, + cell_format="json", + ) + + +@pytest.fixture +def fake_records_by_id(): + return [ + fake_record(fld1VnoyuotSTyxW1="Alice"), + fake_record(Name="Jack"), # values for negative test + ] + + +def test_get_fields_by_id(fake_records_by_id): + """ + Test that we can get fields by their field ID. + """ + with Mocker() as mock: + mock.get( + FakeModelByIds.meta.table.urls.records.add_qs( + returnFieldsByFieldId=1, + cellFormat="json", + ), + json={"records": fake_records_by_id}, + complete_qs=True, + status_code=200, + ) + fake_models = FakeModelByIds.all() + + assert fake_models[0].name == "Alice" + assert fake_models[1].name == "" + + +def test_meta_wrapper(): + """ + Test that Model subclasses have access to the _Meta wrapper. 
+ """ + + class Dummy(Model): + Meta = fake_meta(api_key="asdf") + + assert Dummy.meta.model is Dummy + assert Dummy.meta.api.api_key == "asdf" + + +def test_meta_dict(): + """ + Test that Meta can be a dict instead of a class. + """ + + class Dummy(Model): + Meta = { + "api_key": "asdf", + "base_id": "qwer", + "table_name": "zxcv", + "timeout": (1, 1), + } + + assert Dummy.meta.model is Dummy + assert Dummy.meta.api.api_key == "asdf" + + +@pytest.mark.parametrize("meta_kwargs", [{"timeout": 1}, {"retry": "asdf"}]) +def test_meta_type_check(meta_kwargs): + """ + Test that we check types on certain Meta attributes. + """ + + class Dummy(Model): + Meta = fake_meta(**meta_kwargs) + + with pytest.raises(TypeError): + Dummy.meta.api + + def test_dynamic_model_meta(): """ Test that we can provide callables in our Meta class to provide - the access token, base ID, and table name at runtime. + the access token, base ID, and table name at runtime. Also ensure + that callable Meta attributes don't get called until they're needed. """ data = { "api_key": "FakeApiKey", @@ -144,12 +398,179 @@ class Fake(Model): class Meta: api_key = lambda: data["api_key"] # noqa base_id = partial(data.get, "base_id") - - @staticmethod - def table_name(): - return data["table_name"] + table_name = mock.Mock(return_value=data["table_name"]) f = Fake() - assert f._get_meta("api_key") == data["api_key"] - assert f._get_meta("base_id") == data["base_id"] - assert f._get_meta("table_name") == data["table_name"] + Fake.Meta.table_name.assert_not_called() + + assert f.meta.api_key == data["api_key"] + assert f.meta.base_id == data["base_id"] + assert f.meta.table_name == data["table_name"] + Fake.Meta.table_name.assert_called_once() + + +@mock.patch("pyairtable.Table.create") +def test_save__create(mock_create): + """ + Test that we can save a model instance we've created. 
+ """ + mock_create.return_value = { + "id": fake_id, + "createdTime": datetime.now(timezone.utc).isoformat(), + "fields": {"one": "ONE", "two": "TWO"}, + } + obj = FakeModel(one="ONE", two="TWO") + result = obj.save() + assert result.saved + assert result.created + assert result.field_names == {"one", "two"} + assert not result.updated + assert not result.forced + mock_create.assert_called_once_with( + {"one": "ONE", "two": "TWO"}, + typecast=True, + use_field_ids=False, + ) + + +@mock.patch("pyairtable.Table.update") +def test_save__update(mock_update): + """ + Test that we can save a model instance that already exists. + """ + obj = FakeModel.from_record(fake_record(one="ONE", two="TWO")) + obj.one = "new value" + result = obj.save() + assert result.saved + assert not result.created + assert result.updated + assert result.field_names == {"one"} + assert not result.forced + mock_update.assert_called_once_with( + obj.id, + {"one": "new value"}, + typecast=True, + use_field_ids=False, + ) + + +@mock.patch("pyairtable.Table.update") +def test_save__update_force(mock_update): + """ + Test that we can save a model instance that already exists, + and we can force saving all values to the API. + """ + obj = FakeModel.from_record(fake_record(one="ONE", two="TWO")) + obj.one = "new value" + result = obj.save(force=True) + assert result.saved + assert not result.created + assert result.updated + assert result.forced + assert result.field_names == {"one", "two"} + mock_update.assert_called_once_with( + obj.id, {"one": "new value", "two": "TWO"}, typecast=True, use_field_ids=False + ) + + +@mock.patch("pyairtable.Table.update") +def test_save__noop(mock_update): + """ + Test that if a model is unchanged, we don't try to save it to the API. 
+ """ + obj = FakeModel.from_record(fake_record(one="ONE", two="TWO")) + result = obj.save() + assert not result.saved + assert not result.created + assert not result.updated + assert not result.field_names + assert not result.forced + mock_update.assert_not_called() + + +@mock.patch("pyairtable.Table.create") +def test_save__use_field_ids__create(mock_create): + """ + Test that we can correctly save a model which uses field IDs. + """ + mock_create.return_value = fake_record(**{FakeModelByIds.name.field_name: "Alice"}) + obj = FakeModelByIds(name="Alice") + obj.save() + mock_create.assert_called_once_with( + {FakeModelByIds.name.field_name: "Alice"}, + typecast=True, + use_field_ids=True, + ) + + +@mock.patch("pyairtable.Table.update") +def test_save__use_field_ids__update(mock_update): + """ + Test that we can correctly save a model which uses field IDs. + """ + record = fake_record(**{FakeModelByIds.name.field_name: "Alice"}) + mock_update.return_value = record + obj = FakeModelByIds.from_record(record) + obj.name = "Bob" + obj.save() + mock_update.assert_called_once_with( + obj.id, + {FakeModelByIds.name.field_name: "Bob"}, + typecast=True, + use_field_ids=True, + ) + + +def test_save_bool_deprecated(): + """ + Test that SaveResult instances can be used as booleans, but emit a deprecation warning. + """ + with pytest.deprecated_call(): + assert bool(SaveResult(fake_id(), created=False)) is False + + with pytest.deprecated_call(): + assert bool(SaveResult(fake_id(), created=True)) is True + + +def test_pickling(): + """ + Test that a model instance can be pickled and unpickled. 
+ """ + instance = FakeModel.from_record(fake_record(one="one", two="two")) + pickled = pickle.dumps(instance) + unpickled = pickle.loads(pickled) + assert isinstance(unpickled, FakeModel) + assert unpickled is not instance + assert unpickled.id == instance.id + assert unpickled.created_time == instance.created_time + assert unpickled._fields == instance._fields + + +class LinkedModel(Model): + Meta = fake_meta() + name = f.TextField("Name") + links = f.LinkField("Link", FakeModel) + + +def test_pickling_with_change_tracking_list(): + """ + Test that a model with a ChangeTrackingList can be pickled and unpickled. + """ + fake_models = [FakeModel.from_record(fake_record()) for _ in range(5)] + instance = LinkedModel.from_record(fake_record()) + instance.links = fake_models + instance._changed.clear() # Don't want to pickle that part. + + # Now we need to be able to pickle and unpickle the model instance. + # We can't pickle/unpickle the list itself on its own, because it needs + # to retain references to the field and model. + pickled = pickle.dumps(instance) + unpickled = pickle.loads(pickled) + assert isinstance(unpickled, LinkedModel) + unpickled_link_ids = [link.id for link in unpickled.links] + assert unpickled_link_ids == [link.id for link in fake_models] + + # Make sure change tracking still works. 
+ assert "Link" not in unpickled._changed + unpickled.links.append(FakeModel.from_record(fake_record())) + assert unpickled._changed["Link"] is True diff --git a/tests/test_orm_model__memoization.py b/tests/test_orm_model__memoization.py new file mode 100644 index 00000000..3f900059 --- /dev/null +++ b/tests/test_orm_model__memoization.py @@ -0,0 +1,201 @@ +from unittest.mock import Mock + +import pytest + +from pyairtable.orm import Model +from pyairtable.orm import fields as f +from pyairtable.testing import fake_meta, fake_record + + +class Author(Model): + Meta = fake_meta(memoize=True) + name = f.TextField("Name") + books = f.LinkField["Book"]("Books", "Book") + + +class Book(Model): + Meta = fake_meta() + name = f.TextField("Title") + author = f.SingleLinkField("Author", Author) + + +@pytest.fixture(autouse=True) +def clear_memoization_and_forbid_api_calls(requests_mock): + Author._memoized.clear() + Book._memoized.clear() + + +@pytest.fixture +def record_mocks(requests_mock): + mocks = Mock() + mocks.authors = { + record["id"]: record + for record in [ + fake_record(Name="Abigail Adams"), + fake_record(Name="Babette Brown"), + fake_record(Name="Cristina Cubas"), + ] + } + mocks.books = { + record["id"]: record + for author_id, n in zip(mocks.authors, range(3)) + if (record := fake_record(Title=f"Book {n}", Author=[author_id])) + } + for book_id, book_record in mocks.books.items(): + author_record = mocks.authors[book_record["fields"]["Author"][0]] + author_record["fields"]["Books"] = [book_id] + + # for Model.all + mocks.get_authors = requests_mock.get( + Author.meta.table.urls.records, + json={"records": list(mocks.authors.values())}, + ) + mocks.get_books = requests_mock.get( + Book.meta.table.urls.records, + json={"records": list(mocks.books.values())}, + ) + + # for Model.from_id + mocks.get_author = { + record_id: requests_mock.get( + Author.meta.table.urls.record(record_id), json=record_data + ) + for record_id, record_data in mocks.authors.items() + } 
+ mocks.get_book = { + record_id: requests_mock.get( + Book.meta.table.urls.record(record_id), json=record_data + ) + for record_id, record_data in mocks.books.items() + } + mocks.get = {**mocks.get_author, **mocks.get_book} + + return mocks + + +parametrized_memoization_test = pytest.mark.parametrize( + "cls,kwargs,expect_memoized", + [ + # Meta.memoize is True, memoize= is not provided + (Author, {}, True), + # Meta.memoize is True, memoize=False + (Author, {"memoize": False}, False), + # Meta.memoize is False, memoize= is not provided + (Book, {}, False), + # Meta.memoize is False, memoize=True + (Book, {"memoize": True}, True), + ], +) + + +@parametrized_memoization_test +def test_memoize__from_record(cls, kwargs, expect_memoized): + """ + Test whether Model.from_record saves objects to Model._memoized + """ + obj = cls.from_record(fake_record(), **kwargs) + assert_memoized(obj, expect_memoized) + + +@parametrized_memoization_test +def test_memoize__from_id(record_mocks, cls, kwargs, expect_memoized): + """ + Test whether Model.from_id saves objects to Model._memoized + """ + record_id = list(getattr(record_mocks, cls.__name__.lower() + "s"))[0] + obj = cls.from_id(record_id, **kwargs) + assert record_mocks.get[record_id].call_count == 1 + assert_memoized(obj, expect_memoized) + + +@parametrized_memoization_test +def test_memoize__all(record_mocks, cls, kwargs, expect_memoized): + """ + Test whether Model.all saves objects to Model._memoized + """ + for obj in cls.all(**kwargs): + assert_memoized(obj, expect_memoized) + + +@parametrized_memoization_test +def test_memoize__first(record_mocks, cls, kwargs, expect_memoized): + """ + Test whether Model.all saves objects to Model._memoized + """ + assert_memoized(cls.first(**kwargs), expect_memoized) + + +def assert_memoized(obj: Model, expect_memoized: bool = True): + if expect_memoized: + assert obj.__class__._memoized[obj.id] is obj + else: + assert obj.id not in obj.__class__._memoized + + +def test_from_id(): + 
""" + Test that Model.from_id pulls from Model._memoized, regardless + of whether Model.Meta.memoize is True or False. + """ + book = Book.from_record(fake_record()) + Book._memoized[book.id] = book + assert Book.from_id(book.id) is book + + +def test_from_ids(record_mocks): + """ + Test that Model.from_ids pulls from Model._memoized, regardless + of whether Model.Meta.memoize is True or False. + """ + book = Book.from_record(fake_record()) + Book._memoized = {book.id: book} + books = Book.from_ids([book.id, *list(record_mocks.books)]) + # We got all four, but only requested the non-memoized three from the API + assert len(books) == 4 + assert record_mocks.get_books.call_count == 1 + assert record_mocks.get_books.last_request.qs["filterByFormula"] == [ + "OR(%s)" % ", ".join(f"RECORD_ID()='{id}'" for id in sorted(record_mocks.books)) + ] + + +def test_memoize__link_field(record_mocks): + """ + Test that Model.link_field writes to Model._memoized if Model.Meta.memoize is True + """ + book_id = list(record_mocks.books)[0] + book = Book.from_id(book_id) + assert record_mocks.get[book_id].call_count == 1 + + # no memoization yet + assert not Book._memoized + assert not Author._memoized + + book.author # this makes the call + assert book.author.id == record_mocks.books[book_id]["fields"]["Author"][0] + assert Author._memoized[book.author.id] is book.author + + # test that we only ever made one network call per object + assert record_mocks.get[book.id].call_count == 1 + assert record_mocks.get[book.author.id].call_count == 0 + assert record_mocks.get_authors.call_count == 1 + assert record_mocks.get_authors.last_request.qs["filterByFormula"] == [ + f"OR(RECORD_ID()='{book.author.id}')" + ] + + +def test_memoize__link_field__populate(record_mocks): + """ + Test that Model.link_field.populate writes to Model._memoized if memoize=True + """ + author_id = list(record_mocks.authors)[0] + author = Author.from_id(author_id) + Author.books.populate(author, memoize=True) + assert 
len(author.books) == 1 + for book in author.books: + assert Book._memoized[book.id] is book + assert record_mocks.get[book.id].call_count == 0 + # test that we only ever made one network call + assert record_mocks.get_books.call_count == 1 + assert record_mocks.get_books.last_request.qs["filterByFormula"] == [ + "OR(%s)" % ", ".join(f"RECORD_ID()='{book.id}'" for book in author.books) + ] diff --git a/tests/test_params.py b/tests/test_params.py index 1ca0430d..0a5b706a 100644 --- a/tests/test_params.py +++ b/tests/test_params.py @@ -3,12 +3,12 @@ from requests_mock import Mocker from pyairtable.api.params import ( - InvalidParamException, dict_list_to_request_params, field_names_to_sorting_dict, options_to_json_and_params, options_to_params, ) +from pyairtable.exceptions import InvalidParameterError def test_params_integration(table, mock_records, mock_response_iterator): @@ -17,7 +17,7 @@ def test_params_integration(table, mock_records, mock_response_iterator): "view": "View", "sort": ["Name"], "fields": ["Name", "Age"], - "return_fields_by_field_id": True, + "use_field_ids": True, } with Mocker() as m: url_params = ( @@ -30,7 +30,7 @@ def test_params_integration(table, mock_records, mock_response_iterator): "&returnFieldsByFieldId=1" "" ) - mock_url = "{0}?{1}".format(table.url, url_params) + mock_url = "{0}?{1}".format(table.urls.records, url_params) m.get(mock_url, status_code=200, json=mock_response_iterator) response = table.all(**params) for n, resp in enumerate(response): @@ -97,12 +97,13 @@ def test_params_integration(table, mock_records, mock_response_iterator): [ "time_zone", "America/Chicago", - "?timeZone=America%2FChicago" + "?timeZone=America%2FChicago", # '?timeZone=America/Chicago' ], - ["return_fields_by_field_id", True, "?returnFieldsByFieldId=1"], - ["return_fields_by_field_id", 1, "?returnFieldsByFieldId=1"], - ["return_fields_by_field_id", False, "?returnFieldsByFieldId=0"], + ["use_field_ids", True, "?returnFieldsByFieldId=1"], + 
["use_field_ids", 1, "?returnFieldsByFieldId=1"], + ["use_field_ids", False, "?returnFieldsByFieldId=0"], + ["count_comments", True, "?recordMetadata%5B%5D=commentCount"], # TODO # [ # {"sort": [("Name", "desc"), ("Phone", "asc")]}, @@ -163,9 +164,11 @@ def test_convert_options_to_params(option, value, url_params): }, ], ["cell_format", "string", {"cellFormat": "string"}], - ["return_fields_by_field_id", True, {"returnFieldsByFieldId": True}], - ["return_fields_by_field_id", 1, {"returnFieldsByFieldId": True}], - ["return_fields_by_field_id", False, {"returnFieldsByFieldId": False}], + ["use_field_ids", True, {"returnFieldsByFieldId": True}], + ["use_field_ids", 1, {"returnFieldsByFieldId": True}], + ["use_field_ids", False, {"returnFieldsByFieldId": False}], + ["count_comments", True, {"recordMetadata": ["commentCount"]}], + ["count_comments", False, {}], # userLocale and timeZone are not supported via POST, so they return "spare params" ["user_locale", "en-US", ({}, {"userLocale": "en-US"})], ["time_zone", "America/Chicago", ({}, {"timeZone": "America/Chicago"})], @@ -178,7 +181,7 @@ def test_convert_options_to_json(option, value, expected): def test_process_params_invalid(): - with pytest.raises(InvalidParamException): + with pytest.raises(InvalidParameterError): options_to_params({"ffields": "x"}) @@ -205,3 +208,34 @@ def test_field_names_to_sorting_dict(): "direction": "desc", }, ] + + +def test_record_metadata_options(monkeypatch): + """Test that OPTIONS_TO_RECORD_METADATA can be extended for future metadata options.""" + import pyairtable.api.params + + monkeypatch.setattr( + pyairtable.api.params, + "OPTIONS_TO_RECORD_METADATA", + {"count_comments": "commentCount", "future_option": "futureValue"}, + ) + + # Test GET params with multiple recordMetadata options + result = options_to_params({"count_comments": True, "future_option": True}) + assert set(result.get("recordMetadata[]", [])) == { + "commentCount", + "futureValue", + } + + # Test POST JSON with 
multiple recordMetadata options + json_result, _ = options_to_json_and_params( + {"count_comments": True, "future_option": True} + ) + assert set(json_result.get("recordMetadata", [])) == { + "commentCount", + "futureValue", + } + + # Test with only one option enabled + result = options_to_params({"count_comments": False, "future_option": True}) + assert result.get("recordMetadata[]") == ["futureValue"] diff --git a/tests/test_testing.py b/tests/test_testing.py new file mode 100644 index 00000000..7400b0a0 --- /dev/null +++ b/tests/test_testing.py @@ -0,0 +1,90 @@ +import re +from unittest.mock import ANY + +from pyairtable import testing as T + + +def test_fake_id(): + assert re.match(r"rec[a-zA-Z0-9]{14}", T.fake_id()) + assert T.fake_id(value=123) == "rec00000000000123" + assert T.fake_id("tbl", "x") == "tbl0000000000000x" + + +def test_fake_record(): + assert T.fake_record(id=123) == { + "id": "rec00000000000123", + "createdTime": ANY, + "fields": {}, + } + assert T.fake_record(id="recABC00000000123") == { + "id": "recABC00000000123", + "createdTime": ANY, + "fields": {}, + } + assert T.fake_record({"A": 1}, 123) == { + "id": "rec00000000000123", + "createdTime": ANY, + "fields": {"A": 1}, + } + assert T.fake_record(one=1, two=2) == { + "id": ANY, + "createdTime": ANY, + "fields": {"one": 1, "two": 2}, + } + + +def test_fake_user(): + user = T.fake_user() + assert user == { + "id": ANY, + "email": f"{user['id'].lower()}@example.com", + "name": "Fake User", + } + assert T.fake_user("Alice") == { + "id": "usr000000000Alice", + "email": "alice@example.com", + "name": "Alice", + } + + +def test_fake_attachment(): + assert T.fake_attachment() == { + "id": ANY, + "url": "https://example.com/", + "filename": "foo.txt", + "size": 100, + "type": "text/plain", + } + assert T.fake_attachment(url="https://example.com/image.png") == { + "id": ANY, + "url": "https://example.com/image.png", + "filename": "image.png", + "size": 100, + "type": "image/png", + } + assert 
T.fake_attachment(url="https://example.com", filename="image.png") == { + "id": ANY, + "url": "https://example.com", + "filename": "image.png", + "size": 100, + "type": "image/png", + } + + +def test_coerce_fake_record(): + assert T.coerce_fake_record({"Name": "Alice"}) == { + "id": ANY, + "createdTime": ANY, + "fields": {"Name": "Alice"}, + } + assert T.coerce_fake_record({"fields": {"Name": "Alice"}}) == { + "id": ANY, + "createdTime": ANY, + "fields": {"Name": "Alice"}, + } + assert T.coerce_fake_record({"id": "rec123", "fields": {"Name": "Alice"}}) == { + "id": "rec123", + "createdTime": ANY, + "fields": {"Name": "Alice"}, + } + assert T.coerce_fake_record(fake := T.fake_record()) == fake diff --git a/tests/test_testing__mock_airtable.py b/tests/test_testing__mock_airtable.py new file mode 100644 index 00000000..54434426 --- /dev/null +++ b/tests/test_testing__mock_airtable.py @@ -0,0 +1,266 @@ +from unittest.mock import ANY + +import pytest + +from pyairtable import testing as T + + +@pytest.fixture +def mock_airtable(requests_mock): + with T.MockAirtable() as m: + yield m + + +def test_not_reentrant(): + """ + Test that nested MockAirtable contexts raise an error. + """ + mocked = T.MockAirtable() + with mocked: + with pytest.raises(RuntimeError): + with mocked: + pass + + +def test_multiple_nested_contexts(): + """ + Test that nested MockAirtable contexts raise an error. 
+ """ + with T.MockAirtable(): + with pytest.raises(RuntimeError): + with T.MockAirtable(): + pass + + +def test_add_records__ids(mock_airtable, table): + fake_records = [T.fake_record() for _ in range(3)] + mock_airtable.add_records(table.base.id, table.name, fake_records) + assert table.all() == fake_records + + +def test_add_records__ids_kwarg(mock_airtable, table): + fake_records = [T.fake_record() for _ in range(3)] + mock_airtable.add_records(table.base.id, table.name, records=fake_records) + assert table.all() == fake_records + + +def test_add_records__kwarg(mock_airtable, table): + fake_records = [T.fake_record() for _ in range(3)] + mock_airtable.add_records(table, records=fake_records) + assert table.all() == fake_records + + +def test_add_records__missing_kwarg(mock_airtable, table): + with pytest.raises(TypeError, match="add_records missing keyword"): + mock_airtable.add_records(table) + with pytest.raises(TypeError, match="add_records missing keyword"): + mock_airtable.add_records("base", "table") + + +def test_add_records__invalid_types(mock_airtable): + with pytest.raises( + TypeError, + match=r"add_records expected \(str, str, \.\.\.\), got \(int, float\)", + ): + mock_airtable.add_records(1, 2.0, records=[]) + + +def test_add_records__invalid_kwarg(mock_airtable, table): + with pytest.raises( + TypeError, + match="add_records got unexpected keyword arguments: asdf", + ): + mock_airtable.add_records(table, records=[], asdf=1) + + +@pytest.fixture +def mock_records(mock_airtable, table): + mock_records = [T.fake_record() for _ in range(5)] + mock_airtable.add_records(table, mock_records) + return mock_records + + +@pytest.fixture +def mock_record(mock_records): + return mock_records[0] + + +def test_set_records(mock_airtable, mock_records, table): + replace = [T.fake_record()] + mock_airtable.set_records(table, replace) + assert table.all() == replace + + +def test_set_records__ids(mock_airtable, mock_records, table): + replace = [T.fake_record()] + 
mock_airtable.set_records(table.base.id, table.name, replace) + assert table.all() == replace + + +def test_set_records__ids_kwarg(mock_airtable, mock_records, table): + replace = [T.fake_record()] + mock_airtable.set_records(table.base.id, table.name, records=replace) + assert table.all() == replace + + +def test_set_records__kwarg(mock_airtable, mock_records, table): + replace = [T.fake_record()] + mock_airtable.set_records(table, records=replace) + assert table.all() == replace + + +@pytest.mark.parametrize( + "funcname,expected", + [ + ("all", "mock_records"), + ("iterate", "[mock_records]"), + ("first", "mock_records[0]"), + ], +) +def test_table_iterate(mock_records, table, funcname, expected): + expected = eval(expected, {}, {"mock_records": mock_records}) + assert getattr(table, funcname)() == expected + + +def test_table_get(mock_record, table): + assert table.get(mock_record["id"]) == mock_record + + +def test_table_create(mock_airtable, table): + record = table.create(T.fake_record()["fields"]) + assert record in table.all() + + +def test_table_update(mock_record, table): + table.update(mock_record["id"], {"Name": "Bob"}) + assert table.get(mock_record["id"])["fields"]["Name"] == "Bob" + + +def test_table_delete(mock_record, table): + table.delete(mock_record["id"]) + assert mock_record not in table.all() + + +def test_table_batch_create(mock_airtable, mock_records, table): + mock_airtable.clear() + table.batch_create(mock_records) + assert all(r in table.all() for r in mock_records) + + +def test_table_batch_update(mock_records, table): + table.batch_update( + [{"id": record["id"], "fields": {"Name": "Bob"}} for record in mock_records] + ) + assert all(r["fields"]["Name"] == "Bob" for r in table.all()) + + +def test_table_batch_delete(mock_records, table): + table.batch_delete([r["id"] for r in mock_records]) + assert table.all() == [] + + +def test_table_batch_upsert(mock_airtable, table): + """ + Test that MockAirtable actually performs upsert logic 
correctly. + """ + mock_airtable.clear() + mock_airtable.add_records( + table, + [ + {"id": "rec001", "fields": {"Name": "Alice"}}, + {"id": "rec002", "fields": {"Name": "Bob"}}, + {"id": "rec003", "fields": {"Name": "Carol"}}, + ], + ) + table.batch_upsert( + records=[ + # matches by Name to rec001 + {"fields": {"Name": "Alice", "Email": "alice@example.com"}}, + # matches by Name to rec002 + {"fields": {"Name": "Bob", "Email": "bob@example.com"}}, + # matches by id to rec003 + {"id": "rec003", "fields": {"Email": "carol@example.com"}}, + # no match; will create the record + {"fields": {"Name": "Dave", "Email": "dave@example.com"}}, + ], + key_fields=["Name"], + ) + assert table.all() == [ + { + "id": "rec001", + "createdTime": ANY, + "fields": {"Name": "Alice", "Email": "alice@example.com"}, + }, + { + "id": "rec002", + "createdTime": ANY, + "fields": {"Name": "Bob", "Email": "bob@example.com"}, + }, + { + "id": "rec003", + "createdTime": ANY, + "fields": {"Name": "Carol", "Email": "carol@example.com"}, + }, + { + "id": ANY, + "createdTime": ANY, + "fields": {"Name": "Dave", "Email": "dave@example.com"}, + }, + ] + + +def test_table_batch_upsert__invalid_id(mock_airtable, table): + with pytest.raises(KeyError): + table.batch_upsert( + records=[ + # record does not exist + {"id": "rec999", "fields": {"Name": "Alice"}} + ], + key_fields=["Name"], + ) + + +@pytest.mark.parametrize( + "expr", + [ + "base.collaborators()", + "base.create_table('Name', fields=[])", + "base.delete()", + "base.shares()", + "base.webhooks()", + "table.add_comment('recordId', 'value')", + "table.comments('recordId')", + "table.create_field('name', 'type')", + "table.schema()", + ], +) +def test_unhandled_methods(mock_airtable, monkeypatch, expr, api, base, table): + """ + Test that unhandled methods raise an error. 
+ """ + with pytest.raises(RuntimeError): + eval(expr, {}, {"api": api, "base": base, "table": table}) + + +def test_passthrough(mock_airtable, requests_mock, base, monkeypatch): + """ + Test that we can temporarily pass through unhandled methods to the requests library. + """ + requests_mock.get(base.urls.tables, json={"tables": []}) + + with monkeypatch.context() as mctx: + mctx.setattr(mock_airtable, "passthrough", True) + assert base.schema(force=True).tables == [] # no RuntimeError + + with mock_airtable.enable_passthrough(): + assert base.schema(force=True).tables == [] # no RuntimeError + with mock_airtable.disable_passthrough(): + with pytest.raises(RuntimeError): + base.schema(force=True) + + with mock_airtable.set_passthrough(True): + assert base.schema(force=True).tables == [] # no RuntimeError + + with mock_airtable.set_passthrough(False): + with pytest.raises(RuntimeError): + base.schema(force=True) diff --git a/tests/test_typing.py b/tests/test_typing.py index 549ad817..0e2234d3 100644 --- a/tests/test_typing.py +++ b/tests/test_typing.py @@ -1,6 +1,7 @@ """ Tests that pyairtable.api functions/methods return appropriately typed responses. """ + import datetime from typing import TYPE_CHECKING, Iterator, List, Optional, Union @@ -8,7 +9,11 @@ import pyairtable import pyairtable.api.types as T +import pyairtable.formulas as F +import pyairtable.orm.lists as L +import pyairtable.utils from pyairtable import orm +from pyairtable.models import schema if TYPE_CHECKING: # This section does not actually get executed; it is only parsed by mypy. @@ -20,7 +25,7 @@ # Ensure the type signatures for pyairtable.Api don't change. 
api = pyairtable.Api(access_token) - assert_type(api.build_url("foo", "bar"), str) + assert_type(api.build_url("foo", "bar"), pyairtable.utils.Url) assert_type(api.base(base_id), pyairtable.Base) assert_type(api.table(base_id, table_name), pyairtable.Table) assert_type(api.whoami(), T.UserAndScopesDict) @@ -28,7 +33,7 @@ # Ensure the type signatures for pyairtable.Base don't change. base = pyairtable.Base(api, base_id) assert_type(base.table(table_name), pyairtable.Table) - assert_type(base.url, str) + assert_type(base.id, str) # Ensure the type signatures for pyairtable.Table don't change. table = pyairtable.Table(None, base, table_name) @@ -55,28 +60,50 @@ table.update(record_id, {"Field Name": {"email": "alice@example.com"}}) table.update(record_id, {"Field Name": ["rec1", "rec2", "rec3"]}) + # Ensure batch_upsert takes both records with and without IDs + table.batch_upsert( + [ + {"fields": {"Name": "Carol"}}, + {"id": "recAsdf", "fields": {"Name": "Bob"}}, + {"id": "recAsdf", "createdTime": "", "fields": {"Name": "Alice"}}, + ], + key_fields=["Name"], + ) + # Test type annotations for the ORM class Actor(orm.Model): - name = orm.fields.TextField("Name") + name = orm.fields.SingleLineTextField("Name") logins = orm.fields.MultipleCollaboratorsField("Logins") + bio = orm.fields.MultilineTextField("Bio") - assert_type(Actor().name, Optional[str]) - assert_type(Actor().logins, List[T.CollaboratorDict]) + assert_type(Actor().name, str) + assert_type( + Actor().logins, + L.ChangeTrackingList[Union[T.CollaboratorDict, T.CollaboratorEmailDict]], + ) + Actor().logins.append({"id": "usr123"}) + Actor().logins.append({"email": "alice@example.com"}) + Actor().logins = [{"id": "usr123"}] + Actor().logins = [{"email": "alice@example.com"}] class Movie(orm.Model): name = orm.fields.TextField("Name") rating = orm.fields.RatingField("Star Rating") prequels = orm.fields.LinkField["Movie"]("Prequels", "path.to.Movie") actors = orm.fields.LinkField("Actors", Actor) + prequel = 
orm.fields.SingleLinkField["Movie"]("Prequels", orm.fields.LinkSelf) movie = Movie() - assert_type(movie.name, Optional[str]) + assert_type(movie.name, str) assert_type(movie.rating, Optional[int]) - assert_type(movie.actors, List[Actor]) - assert_type(movie.prequels, List[Movie]) - assert_type(movie.actors[0].name, Optional[str]) + assert_type(movie.actors, L.ChangeTrackingList[Actor]) + assert_type(movie.prequels, L.ChangeTrackingList[Movie]) + assert_type(movie.prequel, Optional[Movie]) + assert_type(movie.actors[0], Actor) + assert_type(movie.actors[0].name, str) class EveryField(orm.Model): + aitext = orm.fields.AITextField("AI Generated Text") attachments = orm.fields.AttachmentsField("Attachments") autonumber = orm.fields.AutoNumberField("Autonumber") barcode = orm.fields.BarcodeField("Barcode") @@ -104,32 +131,158 @@ class EveryField(orm.Model): rich_text = orm.fields.RichTextField("Notes") select = orm.fields.SelectField("Status") url = orm.fields.UrlField("URL") + required_aitext = orm.fields.RequiredAITextField("AI Generated Text") + required_barcode = orm.fields.RequiredBarcodeField("Barcode") + required_collaborator = orm.fields.RequiredCollaboratorField("Assignee") + required_count = orm.fields.RequiredCountField("Count") + required_currency = orm.fields.RequiredCurrencyField("Dollars") + required_date = orm.fields.RequiredDateField("Date") + required_datetime = orm.fields.RequiredDatetimeField("DateTime") + required_duration = orm.fields.RequiredDurationField("Duration (h:mm)") + required_email = orm.fields.RequiredEmailField("Email") + required_float = orm.fields.RequiredFloatField("Decimal 1") + required_integer = orm.fields.RequiredIntegerField("Integer") + required_number = orm.fields.RequiredNumberField("Number") + required_percent = orm.fields.RequiredPercentField("Percent") + required_phone = orm.fields.RequiredPhoneNumberField("Phone") + required_rating = orm.fields.RequiredRatingField("Stars") + required_rich_text = 
orm.fields.RequiredRichTextField("Notes") + required_select = orm.fields.RequiredSelectField("Status") + required_url = orm.fields.RequiredUrlField("URL") + # Check the types of values returned from these fields + # fmt: off record = EveryField() - assert_type(record.attachments, List[T.AttachmentDict]) - assert_type(record.autonumber, Optional[int]) + assert_type(record.aitext, Optional[T.AITextDict]) + assert_type(record.attachments, L.AttachmentsList) + assert_type(record.attachments[0], Union[T.AttachmentDict, T.CreateAttachmentDict]) + assert_type(record.attachments.upload("", b""), None) + assert_type(record.autonumber, int) assert_type(record.barcode, Optional[T.BarcodeDict]) - assert_type(record.button, Optional[T.ButtonDict]) - assert_type(record.checkbox, Optional[bool]) - assert_type(record.collaborator, Optional[T.CollaboratorDict]) + assert_type(record.button, T.ButtonDict) + assert_type(record.checkbox, bool) + assert_type(record.collaborator, Optional[Union[T.CollaboratorDict, T.CollaboratorEmailDict]]) assert_type(record.count, Optional[int]) - assert_type(record.created_by, Optional[T.CollaboratorDict]) - assert_type(record.created, Optional[datetime.datetime]) + assert_type(record.created_by, T.CollaboratorDict) + assert_type(record.created, datetime.datetime) assert_type(record.currency, Optional[Union[int, float]]) assert_type(record.date, Optional[datetime.date]) assert_type(record.datetime, Optional[datetime.datetime]) assert_type(record.duration, Optional[datetime.timedelta]) - assert_type(record.email, Optional[str]) + assert_type(record.email, str) assert_type(record.float, Optional[float]) assert_type(record.integer, Optional[int]) assert_type(record.last_modified_by, Optional[T.CollaboratorDict]) assert_type(record.last_modified, Optional[datetime.datetime]) - assert_type(record.multi_user, List[T.CollaboratorDict]) - assert_type(record.multi_select, List[str]) + assert_type(record.multi_user, 
L.ChangeTrackingList[Union[T.CollaboratorDict, T.CollaboratorEmailDict]]) + assert_type(record.multi_user[0], Union[T.CollaboratorDict, T.CollaboratorEmailDict]) + assert_type(record.multi_select, L.ChangeTrackingList[str]) + assert_type(record.multi_select[0], str) assert_type(record.number, Optional[Union[int, float]]) assert_type(record.percent, Optional[Union[int, float]]) - assert_type(record.phone, Optional[str]) + assert_type(record.phone, str) assert_type(record.rating, Optional[int]) - assert_type(record.rich_text, Optional[str]) + assert_type(record.rich_text, str) assert_type(record.select, Optional[str]) - assert_type(record.url, Optional[str]) + assert_type(record.url, str) + assert_type(record.required_aitext, T.AITextDict) + assert_type(record.required_barcode, T.BarcodeDict) + assert_type(record.required_collaborator, Union[T.CollaboratorDict, T.CollaboratorEmailDict]) + assert_type(record.required_count, int) + assert_type(record.required_currency, Union[int, float]) + assert_type(record.required_date, datetime.date) + assert_type(record.required_datetime, datetime.datetime) + assert_type(record.required_duration, datetime.timedelta) + assert_type(record.required_email, str) + assert_type(record.required_float, float) + assert_type(record.required_integer, int) + assert_type(record.required_number, Union[int, float]) + assert_type(record.required_percent, Union[int, float]) + assert_type(record.required_phone, str) + assert_type(record.required_rating, int) + assert_type(record.required_rich_text, str) + assert_type(record.required_select, str) + assert_type(record.required_url, str) + + # Check the types of each field schema + assert_type(Movie.name.field_schema(), Union[schema.SingleLineTextFieldSchema, schema.MultilineTextFieldSchema]) + assert_type(Actor.name.field_schema(), schema.SingleLineTextFieldSchema) + assert_type(Actor.bio.field_schema(), schema.MultilineTextFieldSchema) + assert_type(EveryField.aitext.field_schema(), 
schema.AITextFieldSchema) + assert_type(EveryField.attachments.field_schema(), schema.MultipleAttachmentsFieldSchema) + assert_type(EveryField.autonumber.field_schema(), schema.AutoNumberFieldSchema) + assert_type(EveryField.barcode.field_schema(), schema.BarcodeFieldSchema) + assert_type(EveryField.button.field_schema(), schema.ButtonFieldSchema) + assert_type(EveryField.checkbox.field_schema(), schema.CheckboxFieldSchema) + assert_type(EveryField.collaborator.field_schema(), schema.SingleCollaboratorFieldSchema) + assert_type(EveryField.count.field_schema(), schema.CountFieldSchema) + assert_type(EveryField.created_by.field_schema(), schema.CreatedByFieldSchema) + assert_type(EveryField.created.field_schema(), schema.CreatedTimeFieldSchema) + assert_type(EveryField.currency.field_schema(), schema.CurrencyFieldSchema) + assert_type(EveryField.date.field_schema(), schema.DateFieldSchema) + assert_type(EveryField.datetime.field_schema(), schema.DateTimeFieldSchema) + assert_type(EveryField.duration.field_schema(), schema.DurationFieldSchema) + assert_type(EveryField.email.field_schema(), schema.EmailFieldSchema) + assert_type(EveryField.float.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.integer.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.last_modified_by.field_schema(), schema.LastModifiedByFieldSchema) + assert_type(EveryField.last_modified.field_schema(), schema.LastModifiedTimeFieldSchema) + assert_type(EveryField.multi_user.field_schema(), schema.MultipleCollaboratorsFieldSchema) + assert_type(EveryField.multi_select.field_schema(), schema.MultipleSelectsFieldSchema) + assert_type(EveryField.number.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.percent.field_schema(), schema.PercentFieldSchema) + assert_type(EveryField.phone.field_schema(), schema.PhoneNumberFieldSchema) + assert_type(EveryField.rating.field_schema(), schema.RatingFieldSchema) + assert_type(EveryField.rich_text.field_schema(), 
schema.RichTextFieldSchema) + assert_type(EveryField.select.field_schema(), schema.SingleSelectFieldSchema) + assert_type(EveryField.url.field_schema(), schema.UrlFieldSchema) + assert_type(EveryField.required_aitext.field_schema(), schema.AITextFieldSchema) + assert_type(EveryField.required_barcode.field_schema(), schema.BarcodeFieldSchema) + assert_type(EveryField.required_collaborator.field_schema(), schema.SingleCollaboratorFieldSchema) + assert_type(EveryField.required_count.field_schema(), schema.CountFieldSchema) + assert_type(EveryField.required_currency.field_schema(), schema.CurrencyFieldSchema) + assert_type(EveryField.required_date.field_schema(), schema.DateFieldSchema) + assert_type(EveryField.required_datetime.field_schema(), schema.DateTimeFieldSchema) + assert_type(EveryField.required_duration.field_schema(), schema.DurationFieldSchema) + assert_type(EveryField.required_email.field_schema(), schema.EmailFieldSchema) + assert_type(EveryField.required_float.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.required_integer.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.required_number.field_schema(), schema.NumberFieldSchema) + assert_type(EveryField.required_percent.field_schema(), schema.PercentFieldSchema) + assert_type(EveryField.required_phone.field_schema(), schema.PhoneNumberFieldSchema) + assert_type(EveryField.required_rating.field_schema(), schema.RatingFieldSchema) + assert_type(EveryField.required_rich_text.field_schema(), schema.RichTextFieldSchema) + assert_type(EveryField.required_select.field_schema(), schema.SingleSelectFieldSchema) + assert_type(EveryField.required_url.field_schema(), schema.UrlFieldSchema) + assert_type(EveryField.meta.table.schema().field("Anything"), schema.FieldSchema) + # fmt: on + + # Check that the type system allows create-style dicts in all places + record.attachments.append({"id": "att123"}) + record.attachments.append({"url": "example.com"}) + 
record.attachments.append({"url": "example.com", "filename": "a.jpg"}) + record.attachments = [{"id": "att123"}] + record.attachments = [{"url": "example.com"}] + record.attachments = [{"url": "example.com", "filename": "a.jpg"}] + record.collaborator = {"id": "usr123"} + record.collaborator = {"email": "alice@example.com"} + record.required_collaborator = {"id": "usr123"} + record.required_collaborator = {"email": "alice@example.com"} + record.multi_user.append({"id": "usr123"}) + record.multi_user.append({"email": "alice@example.com"}) + + # Test type annotations for the formulas module + formula = F.Formula("{Name} = 'Bob'") + assert_type(formula & formula, F.Formula) + assert_type(formula | formula, F.Formula) + assert_type(~formula, F.Formula) + assert_type(formula ^ formula, F.Formula) + assert_type(formula & True, F.Formula) + assert_type(formula | False, F.Formula) + assert_type(formula ^ "literal", F.Formula) + assert_type(F.match({"Name": "Bob"}), F.Formula) + assert_type(F.to_formula(formula), F.Formula) + assert_type(F.to_formula(1), F.Formula) + assert_type(F.to_formula(True), F.Formula) + assert_type(F.to_formula("Bob"), F.Formula) + assert_type(F.CONCATENATE(1, 2, 3), F.FunctionCall) diff --git a/tests/test_url_escape.py b/tests/test_url_escape.py index bb6c06bf..240edb0c 100644 --- a/tests/test_url_escape.py +++ b/tests/test_url_escape.py @@ -17,4 +17,4 @@ def test_url_escape(base, table_name, escaped): table names (which Airtable *will* allow). 
""" table = base.table(table_name) - assert table.url.endswith(escaped) + assert table.urls.records.endswith(escaped) diff --git a/tests/test_utils.py b/tests/test_utils.py index 2e2404be..30c734eb 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,22 +1,30 @@ -from datetime import date, datetime +from datetime import date, datetime, timezone +from functools import partial import pytest from pyairtable import utils +from pyairtable.testing import fake_record + +utc_tz = partial(datetime, tzinfo=timezone.utc) @pytest.mark.parametrize( - "datetime_obj,datetime_str", + "dt_obj,dt_str", [ - (datetime(2000, 1, 2, 3, 4, 5, 0), "2000-01-02T03:04:05.000Z"), - (datetime(2025, 12, 31, 23, 59, 59, 0), "2025-12-31T23:59:59.000Z"), - (datetime(2025, 12, 31, 23, 59, 59, 5_000), "2025-12-31T23:59:59.005Z"), - (datetime(2025, 12, 31, 23, 59, 59, 555_000), "2025-12-31T23:59:59.555Z"), + (datetime(2000, 1, 2, 3, 4, 5, 0), "2000-01-02T03:04:05.000"), + (datetime(2025, 12, 31, 23, 59, 59, 0), "2025-12-31T23:59:59.000"), + (datetime(2025, 12, 31, 23, 59, 59, 5_000), "2025-12-31T23:59:59.005"), + (datetime(2025, 12, 31, 23, 59, 59, 555_000), "2025-12-31T23:59:59.555"), + (utc_tz(2000, 1, 2, 3, 4, 5, 0), "2000-01-02T03:04:05.000Z"), + (utc_tz(2025, 12, 31, 23, 59, 59, 0), "2025-12-31T23:59:59.000Z"), + (utc_tz(2025, 12, 31, 23, 59, 59, 5_000), "2025-12-31T23:59:59.005Z"), + (utc_tz(2025, 12, 31, 23, 59, 59, 555_000), "2025-12-31T23:59:59.555Z"), ], ) -def test_datetime_utils(datetime_obj, datetime_str): - assert utils.datetime_to_iso_str(datetime_obj) == datetime_str - assert utils.datetime_from_iso_str(datetime_str) == datetime_obj +def test_datetime_utils(dt_obj, dt_str): + assert utils.datetime_to_iso_str(dt_obj) == dt_str + assert utils.datetime_from_iso_str(dt_str) == dt_obj @pytest.mark.parametrize( @@ -32,8 +40,218 @@ def test_date_utils(date_obj, date_str): def test_attachment(): - assert utils.attachment("https://url.com") == {"url": "https://url.com"} - assert 
utils.attachment("https://url.com", filename="test.jpg") == { - "url": "https://url.com", - "filename": "test.jpg", - } + with pytest.deprecated_call(): + assert utils.attachment("https://url.com") == {"url": "https://url.com"} + + with pytest.deprecated_call(): + assert utils.attachment("https://url.com", filename="test.jpg") == { + "url": "https://url.com", + "filename": "test.jpg", + } + + +@pytest.mark.parametrize( + "func,value,expected", + [ + (utils.is_airtable_id, -1, False), + (utils.is_airtable_id, "appAkBDICXDqESDhF", True), + (utils.is_airtable_id, "app0000000000Fake", True), + (utils.is_airtable_id, "appWrongLength", False), + (utils.is_record_id, "rec0000000000Fake", True), + (utils.is_record_id, "app0000000000Fake", False), + (utils.is_base_id, "app0000000000Fake", True), + (utils.is_base_id, "rec0000000000Fake", False), + (utils.is_table_id, "tbl0000000000Fake", True), + (utils.is_table_id, "rec0000000000Fake", False), + (utils.is_field_id, "fld0000000000Fake", True), + (utils.is_field_id, "rec0000000000Fake", False), + ], +) +def test_id_check(func, value, expected): + assert func(value) is expected + + +@pytest.mark.parametrize( + "func,input,expected", + [ + (utils.coerce_iso_str, None, None), + (utils.coerce_iso_str, "asdf", ValueError), + (utils.coerce_iso_str, -1, TypeError), + (utils.coerce_iso_str, "2023-01-01", "2023-01-01"), + (utils.coerce_iso_str, "2023-01-01 12:34:56", "2023-01-01 12:34:56"), + (utils.coerce_iso_str, date(2023, 1, 1), "2023-01-01"), + ( + utils.coerce_iso_str, + datetime(2023, 1, 1, 12, 34, 56), + "2023-01-01T12:34:56", + ), + (utils.coerce_list_str, None, []), + (utils.coerce_list_str, "asdf", ["asdf"]), + (utils.coerce_list_str, ("one", "two", "three"), ["one", "two", "three"]), + (utils.coerce_list_str, -1, TypeError), + ], +) +def test_converter(func, input, expected): + if isinstance(expected, type) and issubclass(expected, Exception): + with pytest.raises(expected): + func(input) + return + + assert func(input) == 
expected + + +def test_fieldgetter(): + get_a = utils.fieldgetter("A") + get_abc = utils.fieldgetter("A", "B", "C") + + assert get_a(fake_record(A=1)) == 1 + assert get_a({"fields": {"A": 1}}) == 1 + assert get_abc(fake_record(A=1, C=3)) == (1, None, 3) + assert get_abc({"fields": {"A": 1, "C": 3}}) == (1, None, 3) + + record = fake_record(A="one", B="two") + assert get_a(record) == "one" + assert get_abc(record) == ("one", "two", None) + assert utils.fieldgetter("id")(record) == record["id"] + assert utils.fieldgetter("createdTime")(record) == record["createdTime"] + + +def test_fieldgetter__required(): + """ + Test that required=True means all fields are required. + """ + require_ab = utils.fieldgetter("A", "B", required=True) + record = fake_record(A="one", B="two") + assert require_ab(record) == ("one", "two") + with pytest.raises(KeyError): + require_ab(fake_record(A="one")) + + +def test_fieldgetter__required_list(): + """ + Test that required=["A", "B"] means only A and B are required. + """ + get_abc_require_ab = utils.fieldgetter("A", "B", "C", required=["A", "B"]) + record = fake_record(A="one", B="two") + assert get_abc_require_ab(record) == ("one", "two", None) + with pytest.raises(KeyError): + get_abc_require_ab(fake_record(A="one", C="three")) + + +def test_fieldgetter__required_str(): + """ + Test that required="Bravo" means only Bravo is required, + rather than ["B", "r", "a", "v", "o"]. 
+ """ + get_abc_require_b = utils.fieldgetter("Alpha", "Bravo", required="Bravo") + record = fake_record(Alpha="one", Bravo="two") + assert get_abc_require_b(record) == ("one", "two") + with pytest.raises(KeyError): + get_abc_require_b(fake_record(Alpha="one")) + + +def test_url_builder(base): + class Example(utils.UrlBuilder): + static = "one/two/three" + with_attr = "id/{id}" + with_self_attr = "self.id/{self.id}" + with_property = "self.name/{self.name}" + _ignored = "ignored" + + urls = Example(base) + assert urls.static == "https://api.airtable.com/v0/one/two/three" + assert urls.with_attr == f"https://api.airtable.com/v0/id/{base.id}" + assert urls.with_self_attr == f"https://api.airtable.com/v0/self.id/{base.id}" + assert urls.with_property == f"https://api.airtable.com/v0/self.name/{base.name}" + assert urls._ignored == "ignored" + + +@pytest.mark.parametrize("obj", [None, object(), {"api": object()}]) +def test_url_builder__invalid_context(obj): + with pytest.raises(TypeError): + utils.UrlBuilder(obj) + + +def test_url_builder__modifies_docstring(): + """ + This test is a bit meta, but it ensures that anyone else who wants to use UrlBuilder + can skip docstring creation by passing skip_docstring=True + """ + + class NormalBehavior(utils.UrlBuilder): + test = utils.Url("https://example.com") + + class MissingDocstring(utils.UrlBuilder, skip_docstring=True): + test = utils.Url("https://example.com") + + class ExistingDocstring(utils.UrlBuilder, skip_docstring=True): + """This is the docstring.""" + + test = utils.Url("https://example.com") + + assert "URLs associated with :class:" in NormalBehavior.__doc__ + assert MissingDocstring.__doc__ is None + assert ExistingDocstring.__doc__ == "This is the docstring." 
+ + +def test_url(): + v = utils.Url("https://example.com") + assert v == "https://example.com" + assert v / "foo/bar" / "baz" == "https://example.com/foo/bar/baz" + assert v // [1, 2, "a", "b"] == "https://example.com/1/2/a/b" + assert v & {"a": 1, "b": [2, 3, 4]} == "https://example.com?a=1&b=2&b=3&b=4" + assert v.add_path(1, 2, "a", "b") == "https://example.com/1/2/a/b" + assert v.add_qs({"a": 1}, b=[2, 3, 4]) == "https://example.com?a=1&b=2&b=3&b=4" + + with pytest.raises(TypeError): + v.add_path() + with pytest.raises(TypeError): + v.add_qs() + + +def test_url__parse(): + v = utils.Url("https://example.com:443/asdf?a=1&b=2&b=3#foo") + parsed = v._parse() + assert parsed.scheme == "https" + assert parsed.netloc == "example.com:443" + assert parsed.path == "/asdf" + assert parsed.query == "a=1&b=2&b=3" + assert parsed.fragment == "foo" + assert parsed.hostname == "example.com" + assert parsed.port == 443 + + +def test_url__replace(): + v = utils.Url("https://example.com:443/asdf?a=1&b=2&b=3#foo") + assert v.replace_url(netloc="foo.com") == "https://foo.com/asdf?a=1&b=2&b=3#foo" + + +def test_url_cannot_append_after_params(): + # cannot add path segments after params + v = utils.Url("https://example.com?a=1&b=2") + with pytest.raises(ValueError): + v / "foo" + with pytest.raises(ValueError): + v // ["foo", "bar"] + + +@pytest.mark.parametrize( + "docstring,expected", + [ + ("", ""), + ( + "This is a\ndocstring.", + "|enterprise_only|\n\nThis is a\ndocstring.", + ), + ( + "\t This is a\n\t docstring.", + "\t |enterprise_only|\n\n\t This is a\n\t docstring.", + ), + ], +) +def test_enterprise_docstring(docstring, expected): + @utils.enterprise_only + class Foo: + __doc__ = docstring + + assert Foo.__doc__ == expected diff --git a/tox.ini b/tox.ini index 58bdfffc..d0a0853f 100644 --- a/tox.ini +++ b/tox.ini @@ -1,60 +1,76 @@ [tox] envlist = pre-commit - mypy - mypy-pydantic1 - py3{8,9,10,11}-requests{min,max} - py3{8,9,10,11}-pydantic1 + mypy-py3{10,11,12,13,14} + 
py3{10,11,12,13,14}{,-requestsmin} + integration coverage [gh-actions] python = - 3.8: py38, coverage - 3.9: py39 - 3.10: py310 - 3.11: py311 - -[testenv:pre-commit] -deps = pre-commit -commands = pre-commit run --all-files - -[testenv:mypy] -deps = -r requirements-dev.txt -commands = mypy --strict pyairtable tests/test_typing.py + 3.10: py310, mypy-py310 + 3.11: py311, mypy-py311 + 3.12: coverage, mypy-py312 + 3.13: py313, mypy-py313 + 3.14: py314, mypy-py314 [testenv] -passenv = AIRTABLE_API_KEY +passenv = + AIRTABLE_API_KEY + AIRTABLE_ENTERPRISE_ID addopts = -v testpaths = tests -commands = python -m pytest {posargs} +commands = python -m pytest {posargs:-m 'not integration'} +extras = cli deps = -r requirements-test.txt requestsmin: requests==2.22.0 # Keep in sync with setup.cfg - requestsmax: requests>=2.22.0 # Keep in sync with setup.cfg - pydantic1: pydantic<2 # Lots of projects still use 1.x + +[testenv:pre-commit] +deps = pre-commit +commands = pre-commit run --all-files + +[testenv:mypy,mypy-py3{10,11,12,13,14}] +basepython = + py310: python3.10 + py311: python3.11 + py312: python3.12 + py313: python3.13 + py314: python3.14 +deps = -r requirements-dev.txt +commands = mypy --strict pyairtable scripts tests/test_typing.py + +[testenv:integration] +commands = + python -m pytest -m integration [testenv:coverage] passenv = COVERAGE_FORMAT commands = - python -m pytest -m 'not integration' --cov=pyairtable --cov-report={env:COVERAGE_FORMAT:html} + python -m pytest -m 'not integration' \ + --cov=pyairtable \ + --cov-report={env:COVERAGE_FORMAT:html} \ + --cov-report=term-missing \ + --cov-fail-under=100 [testenv:docs] -basepython = python3.8 +basepython = python3.10 deps = -r requirements-dev.txt commands = - python -m cogapp -r --verbosity=1 {toxinidir}/docs/source/*.rst + python -m cogapp -cr --verbosity=1 {toxinidir}/docs/source/*.rst python -m sphinx -T -E -b html {toxinidir}/docs/source {toxinidir}/docs/build [pytest] +requests_mock_case_sensitive = true 
markers = integration: integration tests, hit airtable api [flake8] filename = *.py count = True -# Per Black Formmater Documentation -ignore = E203, E266, E501, W503 +# See https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html +ignore = E203, E226, E266, E501, E704, W503 select = B,C,E,F,W,T4,B9 max-line-length = 88 max-complexity = 15 @@ -70,3 +86,10 @@ omit = tests/* .venv/* .tox/* + +[coverage:report] +# See https://github.com/nedbat/coveragepy/issues/970 +exclude_also = + @overload + if (typing\.)?TYPE_CHECKING: + \)( -> .+)?: \.\.\.$