From 3c4c180ac49dbe281063b039dbc408d3ea5ca0c0 Mon Sep 17 00:00:00 2001
From: Infant Alex
Date: Mon, 18 Mar 2024 17:38:33 +0100
Subject: [PATCH] Pydantic vendoring (#137)

* Pydantic vendoring
* Updating dependency versions
* fixing issues related to mypy
* Poetry updates
* reverting the changes to the public docs
* Updated readme file
* Changelog updates
* pyproject updates
* adding latest version of the dask package
* fixing issues related to poetry lock file
* poetry.lock updates
* Poetry updates on github actions
* adding init file
* dask version mismatch
* dask version upgrades
---
 .github/workflows/ci.yaml | 4 +-
 .github/workflows/publish.yaml | 4 +-
 .pre-commit-config.yaml | 1 +
 CHANGELOG.md | 14 +
 README.md | 5 +
 cognite/cdffs/__init__.py | 2 +-
 cognite/cdffs/credentials.py | 44 +-
 docs/source/index.rst | 8 +-
 mypy.ini | 4 +
 poetry.lock | 507 ++++-------
 pyproject.toml | 17 +-
 vendor/__init__.py | 0
 vendor/pydantic/__init__.py | 0
 vendor/pydantic/annotated_types.py | 72 ++
 vendor/pydantic/class_validators.py | 361 ++++++++
 vendor/pydantic/color.py | 494 +++++++++++
 vendor/pydantic/config.py | 190 ++++
 vendor/pydantic/dataclasses.py | 500 +++++++++++
 vendor/pydantic/datetime_parse.py | 248 ++++++
 vendor/pydantic/env_settings.py | 350 ++++++++
 vendor/pydantic/error_wrappers.py | 161 ++++
 vendor/pydantic/errors.py | 646 ++++++++++++++
 vendor/pydantic/fields.py | 1256 +++++++++++++++++++++++++++
 vendor/pydantic/json.py | 112 +++
 vendor/pydantic/main.py | 1107 +++++++++++++++++++++++
 vendor/pydantic/networks.py | 747 ++++++++++++++++
 vendor/pydantic/parse.py | 66 ++
 vendor/pydantic/schema.py | 1163 +++++++++++++++++++++++++
 vendor/pydantic/types.py | 1205 +++++++++++++++++++++++++
 vendor/pydantic/typing.py | 597 +++++++++++++
 vendor/pydantic/utils.py | 803 +++++++++++++++++
 vendor/pydantic/validators.py | 765 ++++++++++++++++
 vendor/pydantic/version.py | 38 +
 33 files changed, 11125 insertions(+), 366 deletions(-)
 create mode 100644 vendor/__init__.py
 create mode 100644 vendor/pydantic/__init__.py
 create mode 100644 vendor/pydantic/annotated_types.py
 create mode 100644 vendor/pydantic/class_validators.py
 create mode 100644 vendor/pydantic/color.py
 create mode 100644 vendor/pydantic/config.py
 create mode 100644 vendor/pydantic/dataclasses.py
 create mode 100644 vendor/pydantic/datetime_parse.py
 create mode 100644 vendor/pydantic/env_settings.py
 create mode 100644 vendor/pydantic/error_wrappers.py
 create mode 100644 vendor/pydantic/errors.py
 create mode 100644 vendor/pydantic/fields.py
 create mode 100644 vendor/pydantic/json.py
 create mode 100644 vendor/pydantic/main.py
 create mode 100644 vendor/pydantic/networks.py
 create mode 100644 vendor/pydantic/parse.py
 create mode 100644 vendor/pydantic/schema.py
 create mode 100644 vendor/pydantic/types.py
 create mode 100644 vendor/pydantic/typing.py
 create mode 100644 vendor/pydantic/utils.py
 create mode 100644 vendor/pydantic/validators.py
 create mode 100644 vendor/pydantic/version.py

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index da16302..43126db 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -15,7 +15,7 @@ jobs:
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip poetry==1.2.2
-          poetry install --extras "settings"
+          poetry install

       - name: Linting
         run: |
@@ -33,7 +33,7 @@ jobs:
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip poetry==1.2.2
-          poetry install --extras "settings"
+          poetry install

       - name: Run tests
         env:
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
index 937d63f..8dadd8e 100644
--- a/.github/workflows/publish.yaml
+++ b/.github/workflows/publish.yaml
@@ -23,7 +23,7 @@ jobs:
         run: |
           python3 -m pip install --upgrade pip poetry==1.2.2
           poetry config virtualenvs.create false
-          poetry install --extras "settings"
+          poetry install

       - name: Linting
         run: |
@@ -41,7 +41,7 @@ jobs:
           poetry run pytest tests/ -v --cov-report xml:coverage.xml --cov
           poetry run coverage xml

-      - uses: codecov/codecov-action@v3
+      - uses: codecov/codecov-action@v4
         with:
           file: ./coverage.xml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1bda83f..35c45d8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -25,6 +25,7 @@ repos:
     rev: 6.1.0
     hooks:
       - id: flake8
+        exclude: ^(vendor/)
         additional_dependencies: ["flake8-pyproject"]
   - repo: https://github.com/PyCQA/pydocstyle
     rev: 6.3.0
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7f8bba7..f09691b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -170,3 +170,17 @@ Changes are grouped as follows
 ### Fixed
 - Dependency updates for source code.
 - Updated Github actions to use the latest versions.
+
+## [0.3.4] - 2024-02-01
+
+### Fixed
+- Dependency updates for source code.
+- Added support for pydantic v1.
+
+## [0.3.5] - 2024-02-01
+
+### Fixed
+- Dependency updates for source code.
+- Updated GitHub Actions to use the latest version of Codecov.
+- Removed the multi-version support for pydantic.
+- Introduced vendoring for pydantic v1.
diff --git a/README.md b/README.md
index 9ba7a15..fa9cf35 100644
--- a/README.md
+++ b/README.md
@@ -50,5 +50,10 @@ Important steps to follow when working with CDF Files using the `fsspec` support
 
 Refer [cdffs.readthedocs.io](https://cdffs.readthedocs.io) for more details.
 
+## Vendoring
+
+`cdffs` uses a vendored copy of the pydantic v1 package. Vendoring was introduced mainly to overcome
+pydantic version conflicts related to Cognite Notebooks.
+
 ## Contributing
 Want to contribute? Check out [CONTRIBUTING](CONTRIBUTING.md).
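For illustration only (not part of the patch): a minimal sketch of how code consumes the vendored copy after this change. The `vendor.pydantic` import paths are exactly the ones the `credentials.py` diff below switches to; the `ExampleSettings` class, its fields, and the `CDFFS_` prefix are hypothetical.

```python
# Minimal sketch, assuming it runs from the repository root so the new
# vendor/ tree is importable. Imports resolve against the vendored
# pydantic v1 modules rather than whatever pydantic is on sys.path.
from typing import List, Optional, Union

from vendor.pydantic.class_validators import validator
from vendor.pydantic.env_settings import BaseSettings
from vendor.pydantic.types import SecretStr


class ExampleSettings(BaseSettings):
    """Hypothetical settings class mirroring the FsConfig pattern."""

    client_secret: Optional[SecretStr] = None
    scopes: Optional[Union[str, List]] = None

    class Config:
        env_prefix = "CDFFS_"  # hypothetical prefix, e.g. CDFFS_SCOPES
        env_file = ".env"
        env_file_encoding = "utf-8"

    @validator("scopes")
    def _split_scopes(cls, value):
        # Hypothetical normalization: turn a comma-separated string into
        # a list, comparable in spirit to validate_scopes in credentials.py.
        return value.split(",") if isinstance(value, str) else value


settings = ExampleSettings()  # works even when pydantic v2 is installed
```

Because everything resolves inside `vendor/`, importing `cdffs` no longer constrains which pydantic version the host environment (for example, Cognite Notebooks) pins.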
diff --git a/cognite/cdffs/__init__.py b/cognite/cdffs/__init__.py
index c24ec35..6ef35a8 100644
--- a/cognite/cdffs/__init__.py
+++ b/cognite/cdffs/__init__.py
@@ -4,7 +4,7 @@
 from .spec import CdfFileSystem
 
-__version__ = "0.3.4"
+__version__ = "0.3.5"
 
 __all__ = ["CdfFileSystem"]
 
 fsspec.register_implementation(CdfFileSystem.protocol, CdfFileSystem)
diff --git a/cognite/cdffs/credentials.py b/cognite/cdffs/credentials.py
index 7444672..808806f 100644
--- a/cognite/cdffs/credentials.py
+++ b/cognite/cdffs/credentials.py
@@ -6,15 +6,9 @@
 from cognite.client import ClientConfig, CogniteClient
 from cognite.client.credentials import OAuthClientCredentials, Token
 
-try:
-    from pydantic import BaseSettings, ConfigDict, SecretStr, validator
-
-    is_pydantic_v2 = False
-except ImportError:
-    from pydantic import ConfigDict, SecretStr, field_validator
-    from pydantic_settings import BaseSettings
-
-    is_pydantic_v2 = True
+from vendor.pydantic.class_validators import validator
+from vendor.pydantic.env_settings import BaseSettings
+from vendor.pydantic.types import SecretStr
 
 
 def validate_scopes(cls: Any, value: str) -> Optional[List]:
@@ -27,7 +21,14 @@ def validate_scopes(cls: Any, value: str) -> Optional[List]:
 class FsConfig(BaseSettings):
     """Base config to parse environment variables."""
 
-    model_config: ConfigDict = ConfigDict(env_nested_delimiter="__", env_file=".env", env_file_encoding="utf-8")
+    class Config:
+        """Global config for Base Settings."""
+
+        env_nested_delimiter = "__"
+        env_file = ".env"
+        env_file_encoding = "utf-8"
+
+    # model_config: ConfigDict = ConfigDict(env_nested_delimiter="__", env_file=".env", env_file_encoding="utf-8")
 
 
 class FsCredentials(FsConfig, ABC):
@@ -79,10 +80,7 @@ class FsOAuthCredentials(FsCredentials, FsConfig):
     scopes: Optional[Union[str, List]] = None
 
     # Validator
-    if is_pydantic_v2:
-        _scopes = field_validator("scopes")(validate_scopes)
-    else:
-        _scopes = validator("scopes")(validate_scopes)
+    _scopes = validator("scopes")(validate_scopes)
 
     def get_credentials(self) -> OAuthClientCredentials:
         """Construct credentials based on environment variables.
@@ -122,23 +120,13 @@ def get_credentials(self) -> Token:
 
 def get_connection_config(env_file: str) -> CogniteClient:
     """Construct Cognite Client from environment variables."""
-    credentials = FsOAuthCredentials(_env_file=env_file)
+    credentials = FsOAuthCredentials(_env_file=env_file)  # type:ignore
     connection_config = None
-
-    if is_pydantic_v2:
-        credentials_dump = credentials.model_dump().items()
-    else:
-        credentials_dump = credentials.dict().items()
-
-    if all(value is not None for _, value in credentials_dump):
+    if all(value is not None for _, value in credentials.dict().items()):
         connection_config = credentials.get_client_config()
     else:
-        token = FsToken(_env_file=env_file)
-        if is_pydantic_v2:
-            token_dump = token.model_dump().items()
-        else:
-            token_dump = token.dict().items()
-        if all(value is not None for _, value in token_dump):
+        token = FsToken(_env_file=env_file)  # type:ignore
+        if all(value is not None for _, value in token.dict().items()):
             connection_config = token.get_client_config()
 
     return connection_config
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 090ae18..c1f9c1d 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -9,13 +9,7 @@ the list of all supported/compatible python packages.
 Installation
 ^^^^^^^^^^^^
 
-To install this package(Recommended):
-
-  ..
code-block:: bash - - pip install cognite-cdffs[settings] - -If you need `cdffs` to be compatible with pydantic-v1, choose to install the expected pydantic-v1 (`^1.10.7`) and use, +To install this package: .. code-block:: bash diff --git a/mypy.ini b/mypy.ini index a9ce278..26938eb 100644 --- a/mypy.ini +++ b/mypy.ini @@ -9,3 +9,7 @@ ignore_missing_imports = false disallow_untyped_defs = true follow_imports = normal show_error_codes = true + + +[mypy-vendor.pydantic.*] +ignore_errors = true diff --git a/poetry.lock b/poetry.lock index 8cc3580..74d3405 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,17 +11,6 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - [[package]] name = "asciitree" version = "0.3.3" @@ -357,13 +346,13 @@ files = [ [[package]] name = "cognite-sdk" -version = "7.26.0" +version = "7.28.2" description = "Cognite Python SDK" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "cognite_sdk-7.26.0-py3-none-any.whl", hash = "sha256:38ae4c57e2e30383c882ddfcfb5fa1d4b702436ba4d785ae8fd0cdaa32044341"}, - {file = "cognite_sdk-7.26.0.tar.gz", hash = "sha256:de5b99fbd820a05465baa0aa6ea8d138ae158baf2607173ad42a2b8d0709aa2a"}, + {file = "cognite_sdk-7.28.2-py3-none-any.whl", hash = "sha256:5bd054f08c6dd68638d8eed80a3766b3a3111ccea7f4cf1fb08bd4fb257d9897"}, + {file = "cognite_sdk-7.28.2.tar.gz", hash = "sha256:07bbf2b375e7486290d5b9dcd17aba59f20001e5b55af90b0bf46ca44719b4f5"}, ] [package.dependencies] @@ -397,63 +386,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", 
hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = 
"coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = 
"coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = 
"coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash 
= "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.dependencies] @@ -518,21 +507,24 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "dask" -version = "2024.2.1" +version = "2024.3.1" description = "Parallel PyData with Task Scheduling" optional = false python-versions = ">=3.9" files = [ - {file = "dask-2024.2.1-py3-none-any.whl", hash = "sha256:a13fcdeead3bab3576495023f83097adcffe2f03c371c241b5a1f0b232b35b38"}, - {file = "dask-2024.2.1.tar.gz", hash = "sha256:9504a1e9f5d8e5403fae931f9f1660d41f510f48895ccefce856ec6a4c2198d8"}, + {file = "dask-2024.3.1-py3-none-any.whl", hash = "sha256:1ac260b8716b1a9fc144c0d7f958336812cfc3ef542a3742c9ae02387189b32b"}, + {file = "dask-2024.3.1.tar.gz", hash = "sha256:78bee2ffd735514e572adaa669fc2a437ec256aecb6bec036a1f5b8dd36b2e60"}, ] [package.dependencies] click = ">=8.1" cloudpickle = ">=1.5.0" +dask-expr = {version = ">=1.0,<1.1", optional = true, markers = "extra == \"dataframe\""} fsspec = ">=2021.09.0" -importlib-metadata = ">=4.13.0" +importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} +numpy = {version = ">=1.21", optional = true, markers = "extra == \"array\""} packaging = ">=20.0" +pandas = {version = ">=1.3", optional = true, markers = "extra == \"dataframe\""} partd = ">=1.2.0" pyyaml = ">=5.3.1" toolz = ">=0.10.0" @@ -540,11 +532,27 @@ toolz = ">=0.10.0" [package.extras] array = ["numpy (>=1.21)"] complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] -dataframe = ["dask[array]", "pandas (>=1.3)"] +dataframe = ["dask-expr (>=1.0,<1.1)", "dask[array]", "pandas (>=1.3)"] diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.2.1)"] +distributed = ["distributed (==2024.3.1)"] test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] +[[package]] +name = "dask-expr" +version = "1.0.3" +description = "High Level Expressions for Dask" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dask-expr-1.0.3.tar.gz", hash = "sha256:c7ae183ee434600deba7984a8d3a145b3fc65970e2341e54b78c4a83855e2016"}, + {file = "dask_expr-1.0.3-py3-none-any.whl", hash = "sha256:6820f424f594f063826167e8cebedb6aca03252f4fdb091edb90b6da445c3536"}, +] + +[package.dependencies] +dask = "2024.3.1" +pandas = ">=2" +pyarrow = ">=7.0.0" + [[package]] name = "distlib" version = "0.3.8" @@ -610,35 +618,35 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "fiona" -version = "1.9.5" +version = "1.9.6" description = "Fiona reads and writes spatial data files" optional = false python-versions = ">=3.7" files = [ - {file = "fiona-1.9.5-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:5f40a40529ecfca5294260316cf987a0420c77a2f0cf0849f529d1afbccd093e"}, - {file = 
"fiona-1.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:374efe749143ecb5cfdd79b585d83917d2bf8ecfbfc6953c819586b336ce9c63"}, - {file = "fiona-1.9.5-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:35dae4b0308eb44617cdc4461ceb91f891d944fdebbcba5479efe524ec5db8de"}, - {file = "fiona-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:5b4c6a3df53bee8f85bb46685562b21b43346be1fe96419f18f70fa1ab8c561c"}, - {file = "fiona-1.9.5-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:6ad04c1877b9fd742871b11965606c6a52f40706f56a48d66a87cc3073943828"}, - {file = "fiona-1.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fb9a24a8046c724787719e20557141b33049466145fc3e665764ac7caf5748c"}, - {file = "fiona-1.9.5-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d722d7f01a66f4ab6cd08d156df3fdb92f0669cf5f8708ddcb209352f416f241"}, - {file = "fiona-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:7ede8ddc798f3d447536080c6db9a5fb73733ad8bdb190cb65eed4e289dd4c50"}, - {file = "fiona-1.9.5-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:8b098054a27c12afac4f819f98cb4d4bf2db9853f70b0c588d7d97d26e128c39"}, - {file = "fiona-1.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d9f29e9bcbb33232ff7fa98b4a3c2234db910c1dc6c4147fc36c0b8b930f2e0"}, - {file = "fiona-1.9.5-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:f1af08da4ecea5036cb81c9131946be4404245d1b434b5b24fd3871a1d4030d9"}, - {file = "fiona-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:c521e1135c78dec0d7774303e5a1b4c62e0efb0e602bb8f167550ef95e0a2691"}, - {file = "fiona-1.9.5-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:fce4b1dd98810cabccdaa1828430c7402d283295c2ae31bea4f34188ea9e88d7"}, - {file = "fiona-1.9.5-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:3ea04ec2d8c57b5f81a31200fb352cb3242aa106fc3e328963f30ffbdf0ff7c8"}, - {file = "fiona-1.9.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4877cc745d9e82b12b3eafce3719db75759c27bd8a695521202135b36b58c2e7"}, - {file = "fiona-1.9.5-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ac2c250f509ec19fad7959d75b531984776517ef3c1222d1cc5b4f962825880b"}, - {file = "fiona-1.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4df21906235928faad856c288cfea0298e9647f09c9a69a230535cbc8eadfa21"}, - {file = "fiona-1.9.5-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:81d502369493687746cb8d3cd77e5ada4447fb71d513721c9a1826e4fb32b23a"}, - {file = "fiona-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:ce3b29230ef70947ead4e701f3f82be81082b7f37fd4899009b1445cc8fc276a"}, - {file = "fiona-1.9.5-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:8b53ce8de773fcd5e2e102e833c8c58479edd8796a522f3d83ef9e08b62bfeea"}, - {file = "fiona-1.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bd2355e859a1cd24a3e485c6dc5003129f27a2051629def70036535ffa7e16a4"}, - {file = "fiona-1.9.5-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:9a2da52f865db1aff0eaf41cdd4c87a7c079b3996514e8e7a1ca38457309e825"}, - {file = "fiona-1.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:cfef6db5b779d463298b1113b50daa6c5b55f26f834dc9e37752116fa17277c1"}, - {file = "fiona-1.9.5.tar.gz", hash = "sha256:99e2604332caa7692855c2ae6ed91e1fffdf9b59449aa8032dd18e070e59a2f7"}, + {file = "fiona-1.9.6-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:63e528b5ea3d8b1038d788e7c65117835c787ba7fdc94b1b42f09c2cbc0aaff2"}, + {file = "fiona-1.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:918bd27d8625416672e834593970f96dff63215108f81efb876fe5c0bc58a3b4"}, + {file = "fiona-1.9.6-cp310-cp310-manylinux2014_x86_64.whl", hash = 
"sha256:e313210b30d09ed8f829bf625599e248dadd78622728030221f6526580ff26c5"}, + {file = "fiona-1.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:89095c2d542325ee45894b8837e8048cdbb2f22274934e1be3b673ca628010d7"}, + {file = "fiona-1.9.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:98cea6f435843b2119731c6b0470e5b7386aa16b6aa7edabbf1ed93aefe029c3"}, + {file = "fiona-1.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f4230eccbd896a79d1ebfa551d84bf90f512f7bcbe1ca61e3f82231321f1a532"}, + {file = "fiona-1.9.6-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:48b6218224e96de5e36b5eb259f37160092260e5de0dcd82ca200b1887aa9884"}, + {file = "fiona-1.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:c1dd5fbc29b7303bb87eb683455e8451e1a53bb8faf20ef97fdcd843c9e4a7f6"}, + {file = "fiona-1.9.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:42d8a0e5570948d3821c493b6141866d9a4d7a64edad2be4ecbb89f81904baac"}, + {file = "fiona-1.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39819fb8f5ec6d9971cb01b912b4431615a3d3f50c83798565d8ce41917930db"}, + {file = "fiona-1.9.6-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:9b53034efdf93ada9295b081e6a8280af7c75496a20df82d4c2ca46d65b85905"}, + {file = "fiona-1.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:1dcd6eca7524535baf2a39d7981b4a46d33ae28c313934a7c3eae62eecf9dfa5"}, + {file = "fiona-1.9.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e5404ed08c711489abcb3a50a184816825b8af06eb73ad2a99e18b8e7b47c96a"}, + {file = "fiona-1.9.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:53bedd2989e255df1bf3378ae9c06d6d241ec273c280c544bb44ffffebb97fb0"}, + {file = "fiona-1.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:77653a08564a44e634c44cd74a068d2f55d1d4029edd16d1c8aadcc4d8cc1d2c"}, + {file = "fiona-1.9.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e7617563b36d2be99f048f0d0054b4d765f4aae454398f88f19de9c2c324b7f8"}, + {file = "fiona-1.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50037c3b7a5f6f434b562b5b1a5b664f1caa7a4383b00af23cdb59bfc6ba852c"}, + {file = "fiona-1.9.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:bf51846ad602757bf27876f458c5c9f14b09421fac612f64273cc4e3fcabc441"}, + {file = "fiona-1.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:11af1afc1255642a7787fe112c29d01f968f1053e4d4700fc6f3bb879c1622e0"}, + {file = "fiona-1.9.6-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:52e8fec650b72fc5253d8f86b63859acc687182281c29bfacd3930496cf982d1"}, + {file = "fiona-1.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9b92aa1badb2773e7cac19bef3064d73e9d80c67c42f0928db2520a04be6f2f"}, + {file = "fiona-1.9.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0eaffbf3bfae9960484c0c08ea461b0c40e111497f04e9475ebf15ac7a22d9dc"}, + {file = "fiona-1.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f1b49d51a744874608b689f029766aa1e078dd72e94b44cf8eeef6d7bd2e9051"}, + {file = "fiona-1.9.6.tar.gz", hash = "sha256:791b3494f8b218c06ea56f892bd6ba893dfa23525347761d066fb7738acda3b1"}, ] [package.dependencies] @@ -648,14 +656,13 @@ click = ">=8.0,<9.0" click-plugins = ">=1.0" cligj = ">=0.5" importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} -setuptools = "*" six = "*" [package.extras] -all = ["Fiona[calc,s3,test]"] +all = ["fiona[calc,s3,test]"] calc = ["shapely"] s3 = ["boto3 (>=1.3.1)"] -test = ["Fiona[s3]", "pytest (>=7)", "pytest-cov", "pytz"] +test = ["fiona[s3]", "pytest (>=7)", "pytest-cov", "pytz"] [[package]] name = "flake8" @@ -795,22 +802,22 @@ files = [ [[package]] name = "importlib-metadata" -version = 
"7.0.1" +version = "7.0.2" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, + {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1205,13 +1212,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1401,67 +1408,67 @@ virtualenv = ">=20.10.0" [[package]] name = "protobuf" -version = "4.25.3" +version = "5.26.0" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = 
"sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.26.0-cp310-abi3-win32.whl", hash = "sha256:f9ecc8eb6f18037e0cbf43256db0325d4723f429bca7ef5cd358b7c29d65f628"}, + {file = "protobuf-5.26.0-cp310-abi3-win_amd64.whl", hash = "sha256:dfd29f6eb34107dccf289a93d44fb6b131e68888d090b784b691775ac84e8213"}, + {file = "protobuf-5.26.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:7e47c57303466c867374a17b2b5e99c5a7c8b72a94118e2f28efb599f19b4069"}, + {file = "protobuf-5.26.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e184175276edc222e2d5e314a72521e10049938a9a4961fe4bea9b25d073c03f"}, + {file = "protobuf-5.26.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:6ee9d1aa02f951c5ce10bf8c6cfb7604133773038e33f913183c8b5201350600"}, + {file = "protobuf-5.26.0-cp38-cp38-win32.whl", hash = "sha256:2c334550e1cb4efac5c8a3987384bf13a4334abaf5ab59e40479e7b70ecd6b19"}, + {file = "protobuf-5.26.0-cp38-cp38-win_amd64.whl", hash = "sha256:8eef61a90631c21b06b4f492a27e199a269827f046de3bb68b61aa84fcf50905"}, + {file = "protobuf-5.26.0-cp39-cp39-win32.whl", hash = "sha256:ca825f4eecb8c342d2ec581e6a5ad1ad1a47bededaecd768e0d3451ae4aaac2b"}, + {file = "protobuf-5.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:efd4f5894c50bd76cbcfdd668cd941021333861ed0f441c78a83d8254a01cc9f"}, + {file = "protobuf-5.26.0-py3-none-any.whl", hash = "sha256:a49b6c5359bf34fb7bf965bf21abfab4476e4527d822ab5289ee3bf73f291159"}, + {file = "protobuf-5.26.0.tar.gz", hash = "sha256:82f5870d74c99addfe4152777bdf8168244b9cf0ac65f8eccf045ddfa9d80d9b"}, ] [[package]] name = "pyarrow" -version = "15.0.0" +version = "15.0.1" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, - {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, - {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, - {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, - {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, - {file = 
"pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, - {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, - {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, - {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, - {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, - {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, - {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, - {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, - {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, - {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, - {file = 
"pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, - {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, - {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, - {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c2ddb3be5ea938c329a84171694fc230b241ce1b6b0ff1a0280509af51c375fa"}, + {file = "pyarrow-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7543ea88a0ff72f8e6baaf9bfdbec2c62aeabdbede9e4a571c71cc3bc43b6302"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1519e218a6941fc074e4501088d891afcb2adf77c236e03c34babcf3d6a0d1c7"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28cafa86e1944761970d3b3fc0411b14ff9b5c2b73cd22aaf470d7a3976335f5"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:be5c3d463e33d03eab496e1af7916b1d44001c08f0f458ad27dc16093a020638"}, + {file = "pyarrow-15.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:47b1eda15d3aa3f49a07b1808648e1397e5dc6a80a30bf87faa8e2d02dad7ac3"}, + {file = "pyarrow-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e524a31be7db22deebbbcf242b189063ab9a7652c62471d296b31bc6e3cae77b"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a476fefe8bdd56122fb0d4881b785413e025858803cc1302d0d788d3522b374d"}, + {file = "pyarrow-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:309e6191be385f2e220586bfdb643f9bb21d7e1bc6dd0a6963dc538e347b2431"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83bc586903dbeb4365cbc72b602f99f70b96c5882e5dfac5278813c7d624ca3c"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07e652daac6d8b05280cd2af31c0fb61a4490ec6a53dc01588014d9fa3fdbee9"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:abad2e08652df153a72177ce20c897d083b0c4ebeec051239e2654ddf4d3c996"}, + {file = "pyarrow-15.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cde663352bc83ad75ba7b3206e049ca1a69809223942362a8649e37bd22f9e3b"}, + {file = "pyarrow-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1b6e237dd7a08482a8b8f3f6512d258d2460f182931832a8c6ef3953203d31e1"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7bd167536ee23192760b8c731d39b7cfd37914c27fd4582335ffd08450ff799d"}, + {file = "pyarrow-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c08bb31eb2984ba5c3747d375bb522e7e536b8b25b149c9cb5e1c49b0ccb736"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f9c1d630ed2524bd1ddf28ec92780a7b599fd54704cd653519f7ff5aec177a"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5186048493395220550bca7b524420471aac2d77af831f584ce132680f55c3df"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:31dc30c7ec8958da3a3d9f31d6c3630429b2091ede0ecd0d989fd6bec129f0e4"}, + {file = "pyarrow-15.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:3f111a014fb8ac2297b43a74bf4495cc479a332908f7ee49cb7cbd50714cb0c1"}, + {file = "pyarrow-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a6d1f7c15d7f68f08490d0cb34611497c74285b8a6bbeab4ef3fc20117310983"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9ad931b996f51c2f978ed517b55cb3c6078272fb4ec579e3da5a8c14873b698d"}, + {file = "pyarrow-15.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:738f6b53ab1c2f66b2bde8a1d77e186aeaab702d849e0dfa1158c9e2c030add3"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1c3fc16bc74e33bf8f1e5a212938ed8d88e902f372c4dac6b5bad328567d2f"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1fa92512128f6c1b8dde0468c1454dd70f3bff623970e370d52efd4d24fd0be"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b4157f307c202cbbdac147d9b07447a281fa8e63494f7fc85081da351ec6ace9"}, + {file = "pyarrow-15.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:b75e7da26f383787f80ad76143b44844ffa28648fcc7099a83df1538c078d2f2"}, + {file = "pyarrow-15.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a99eac76ae14096c209850935057b9e8ce97a78397c5cde8724674774f34e5d"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:dd532d3177e031e9b2d2df19fd003d0cc0520d1747659fcabbd4d9bb87de508c"}, + {file = "pyarrow-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce8c89848fd37e5313fc2ce601483038ee5566db96ba0808d5883b2e2e55dc53"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:862eac5e5f3b6477f7a92b2f27e560e1f4e5e9edfca9ea9da8a7478bb4abd5ce"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f0ea3a29cd5cb99bf14c1c4533eceaa00ea8fb580950fb5a89a5c771a994a4e"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb902f780cfd624b2e8fd8501fadab17618fdb548532620ef3d91312aaf0888a"}, + {file = "pyarrow-15.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:4f87757f02735a6bb4ad2e1b98279ac45d53b748d5baf52401516413007c6999"}, + {file = "pyarrow-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:efd3816c7fbfcbd406ac0f69873cebb052effd7cdc153ae5836d1b00845845d7"}, + {file = "pyarrow-15.0.1.tar.gz", hash = "sha256:21d812548d39d490e0c6928a7c663f37b96bf764034123d4b4ab4530ecc757a9"}, ] [package.dependencies] @@ -1489,135 +1496,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydantic" -version = "2.6.3" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.16.3" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = 
"pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = 
"sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-settings" -version = "2.2.1" -description = "Settings management using Pydantic" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, - {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, -] - -[package.dependencies] -pydantic = ">=2.3.0" -python-dotenv = ">=0.21.0" - -[package.extras] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - [[package]] name = "pyflakes" version = "3.2.0" @@ -1705,13 +1583,13 @@ certifi = "*" [[package]] name = "pytest" -version = "8.1.0" +version = "8.1.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.1.0-py3-none-any.whl", hash = "sha256:ee32db7af8de4629a455806befa90559f307424c07b8413ccfc30bf5b221dd7e"}, - {file = "pytest-8.1.0.tar.gz", hash = "sha256:f8fa04ab8f98d185113ae60ea6d79c22f8143b14bc1caeced44a0ab844928323"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -1894,13 +1772,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = 
"1.3.1" +version = "1.4.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-1.4.0.tar.gz", hash = "sha256:acee623221e4a39abcbb919312c8ff04bd44e7e417087fb4bd5e2a2f53d5e79a"}, + {file = "requests_oauthlib-1.4.0-py2.py3-none-any.whl", hash = "sha256:7a3130d94a17520169e38db6c8d75f2c974643788465ecc2e4b36d288bf13033"}, ] [package.dependencies] @@ -1992,18 +1870,18 @@ jeepney = ">=0.6" [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2296,13 +2174,13 @@ files = [ [[package]] name = "twine" -version = "4.0.2" +version = "5.0.0" description = "Collection of utilities for publishing packages on PyPI" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"}, - {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"}, + {file = 
"twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0"}, + {file = "twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4"}, ] [package.dependencies] @@ -2318,13 +2196,13 @@ urllib3 = ">=1.26.0" [[package]] name = "types-requests" -version = "2.31.0.20240218" +version = "2.31.0.20240311" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240218.tar.gz", hash = "sha256:f1721dba8385958f504a5386240b92de4734e047a08a40751c1654d1ac3349c5"}, - {file = "types_requests-2.31.0.20240218-py3-none-any.whl", hash = "sha256:a82807ec6ddce8f00fe0e949da6d6bc1fbf1715420218a9640d695f70a9e5a9b"}, + {file = "types-requests-2.31.0.20240311.tar.gz", hash = "sha256:b1c1b66abfb7fa79aae09097a811c4aa97130eb8831c60e47aee4ca344731ca5"}, + {file = "types_requests-2.31.0.20240311-py3-none-any.whl", hash = "sha256:47872893d65a38e282ee9f277a4ee50d1b28bd592040df7d1fdaffdf3779937d"}, ] [package.dependencies] @@ -2415,13 +2293,13 @@ viz = ["matplotlib", "nc-time-axis", "seaborn"] [[package]] name = "zarr" -version = "2.17.0" +version = "2.17.1" description = "An implementation of chunked, compressed, N-dimensional arrays for Python" optional = false python-versions = ">=3.9" files = [ - {file = "zarr-2.17.0-py3-none-any.whl", hash = "sha256:d287cb61019c4a0a0f386f76eeaa7f0b1160b1cb90cf96173a4b6cbc135df6e1"}, - {file = "zarr-2.17.0.tar.gz", hash = "sha256:6390a2b8af31babaab4c963efc45bf1da7f9500c9aafac193f84cf019a7c66b0"}, + {file = "zarr-2.17.1-py3-none-any.whl", hash = "sha256:e25df2741a6e92645f3890f30f3136d5b57a0f8f831094b024bbcab5f2797bc7"}, + {file = "zarr-2.17.1.tar.gz", hash = "sha256:564b3aa072122546fe69a0fa21736f466b20fad41754334b62619f088ce46261"}, ] [package.dependencies] @@ -2436,23 +2314,20 @@ jupyter = ["ipytree (>=0.2.2)", "ipywidgets (>=8.0.0)", "notebook"] [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[extras] -settings = ["pydantic-settings"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.9.10,<3.13" -content-hash = 
"990136263ea0f942fc51d3aa453cc9d5658688df950bfe1973fea81ad5308b64" +content-hash = "180cb6bb307be19deade6939d4ea43e081f80d6b13e61acdffa05609cd2298fd" diff --git a/pyproject.toml b/pyproject.toml index 53883d3..b475cbc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cognite-cdffs" -version = "0.3.4" +version = "0.3.5" description = "File System Interface for CDF Files" license = "Apache-2.0" authors = ["Infant Alex "] @@ -8,11 +8,12 @@ readme = "README.md" packages = [ { include="cognite", from="." }, ] + [tool.poetry.group.test.dependencies] pandas = "^2.2.1" -pyarrow = "^15.0.0" -zarr = "^2.17.0" -dask = "^2024.2.1" +pyarrow = "^15.0.1" +zarr = "^2.17.1" +dask = {version = "^2024.3.1", extras = ["dataframe"]} xarray = "^2024.2.0" geodatasets = "^2023.12.0" geopandas = "^0.14.3" @@ -67,17 +68,13 @@ commands = [tool.poetry.dependencies] python = ">=3.9.10,<3.13" -cognite-sdk = "^7.26.0" +cognite-sdk = "^7.28.2" fsspec = "^2024.2.0" requests = "^2.31.0" -twine = "^4.0.2" -pydantic = {version =">=1.10.7"} +twine = "^5.0.0" python-dotenv = "^1.0.1" -pydantic-settings = { version = "^2.0.3", optional = true} tenacity = "^8.2.3" -[tool.poetry.extras] -settings = ["pydantic-settings"] [build-system] requires = ["poetry>=0.12"] diff --git a/vendor/__init__.py b/vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/vendor/pydantic/__init__.py b/vendor/pydantic/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/vendor/pydantic/annotated_types.py b/vendor/pydantic/annotated_types.py new file mode 100644 index 0000000..f6fda80 --- /dev/null +++ b/vendor/pydantic/annotated_types.py @@ -0,0 +1,72 @@ +import sys +from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type + +from .fields import Required +from .main import BaseModel, create_model +from .typing import is_typeddict, is_typeddict_special + +if TYPE_CHECKING: + from typing_extensions import TypedDict + +if sys.version_info < (3, 11): + + def is_legacy_typeddict(typeddict_cls: Type["TypedDict"]) -> bool: # type: ignore[valid-type] + return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == "typing" + +else: + + def is_legacy_typeddict(_: Any) -> Any: + return False + + +def create_model_from_typeddict( + # Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030 + typeddict_cls: Type["TypedDict"], # type: ignore[valid-type] + **kwargs: Any, +) -> Type["BaseModel"]: + """ + Create a `BaseModel` based on the fields of a `TypedDict`. + Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys, + we raise an error if this happens (see https://bugs.python.org/issue38834). + """ + field_definitions: Dict[str, Any] + + # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions` + if not hasattr(typeddict_cls, "__required_keys__"): + raise TypeError( + "You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. " + "Without it, there is no way to differentiate required and optional fields when subclassed." + ) + + if is_legacy_typeddict(typeddict_cls) and any( + is_typeddict_special(t) for t in typeddict_cls.__annotations__.values() + ): + raise TypeError( + "You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. " + "Without it, there is no way to reflect Required/NotRequired keys." 
+    )
+
+    required_keys: FrozenSet[str] = typeddict_cls.__required_keys__  # type: ignore[attr-defined]
+    field_definitions = {
+        field_name: (field_type, Required if field_name in required_keys else None)
+        for field_name, field_type in typeddict_cls.__annotations__.items()
+    }
+
+    return create_model(typeddict_cls.__name__, **kwargs, **field_definitions)
+
+
+def create_model_from_namedtuple(namedtuple_cls: Type["NamedTuple"], **kwargs: Any) -> Type["BaseModel"]:
+    """
+    Create a `BaseModel` based on the fields of a named tuple.
+    A named tuple can be created with `typing.NamedTuple` and declared annotations
+    but also with `collections.namedtuple`, in this case we consider all fields
+    to have type `Any`.
+    """
+    # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic
+    namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, "__annotations__", None) or {
+        k: Any for k in namedtuple_cls._fields
+    }
+    field_definitions: Dict[str, Any] = {
+        field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items()
+    }
+    return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions)
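The two helpers above turn plain `TypedDict`s and named tuples into validating models. A minimal sketch of how they behave, assuming the vendored package is importable as `vendor.pydantic` (the `vendor/pydantic/__init__.py` added above is empty, so names live in the submodules):

```py
# Minimal sketch; the `vendor.pydantic` import path is an assumption based on
# the package layout this patch creates.
from typing import NamedTuple

from typing_extensions import TypedDict

from vendor.pydantic.annotated_types import create_model_from_namedtuple, create_model_from_typeddict


class Point(NamedTuple):
    x: int
    y: int


class User(TypedDict):
    name: str
    age: int


PointModel = create_model_from_namedtuple(Point)
UserModel = create_model_from_typeddict(User)

print(PointModel(x="1", y=2))  # x is coerced to int -> x=1 y=2
print(UserModel(name="alice", age="30"))  # name='alice' age=30
```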
diff --git a/vendor/pydantic/class_validators.py b/vendor/pydantic/class_validators.py
new file mode 100644
index 0000000..a316b3f
--- /dev/null
+++ b/vendor/pydantic/class_validators.py
@@ -0,0 +1,361 @@
+import warnings
+from collections import ChainMap
+from functools import partial, partialmethod, wraps
+from itertools import chain
+from types import FunctionType
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload
+
+from .errors import ConfigError
+from .typing import AnyCallable
+from .utils import ROOT_KEY, in_ipython
+
+if TYPE_CHECKING:
+    from .typing import AnyClassMethod
+
+
+class Validator:
+    __slots__ = "func", "pre", "each_item", "always", "check_fields", "skip_on_failure"
+
+    def __init__(
+        self,
+        func: AnyCallable,
+        pre: bool = False,
+        each_item: bool = False,
+        always: bool = False,
+        check_fields: bool = False,
+        skip_on_failure: bool = False,
+    ):
+        self.func = func
+        self.pre = pre
+        self.each_item = each_item
+        self.always = always
+        self.check_fields = check_fields
+        self.skip_on_failure = skip_on_failure
+
+
+if TYPE_CHECKING:
+    from inspect import Signature
+
+    from .config import BaseConfig
+    from .fields import ModelField
+    from .types import ModelOrDc
+
+    ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any]
+    ValidatorsList = List[ValidatorCallable]
+    ValidatorListDict = Dict[str, List[Validator]]
+
+_FUNCS: Set[str] = set()
+VALIDATOR_CONFIG_KEY = "__validator_config__"
+ROOT_VALIDATOR_CONFIG_KEY = "__root_validator_config__"
+
+
+def validator(
+    *fields: str,
+    pre: bool = False,
+    each_item: bool = False,
+    always: bool = False,
+    check_fields: bool = True,
+    whole: Optional[bool] = None,
+    allow_reuse: bool = False,
+) -> Callable[[AnyCallable], "AnyClassMethod"]:
+    """
+    Decorate methods on the class indicating that they should be used to validate fields
+    :param fields: which field(s) the method should be called on
+    :param pre: whether or not this validator should be called before the standard validators (else after)
+    :param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the
+        whole object
+    :param always: whether this method and other validators should be called even if the value is missing
+    :param check_fields: whether to check that the fields actually exist on the model
+    :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
+    """
+    if not fields:
+        raise ConfigError("validator with no fields specified")
+    elif isinstance(fields[0], FunctionType):
+        raise ConfigError(
+            "validators should be used with fields and keyword arguments, not bare. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name>', ...)`"
+        )
+    elif not all(isinstance(field, str) for field in fields):
+        raise ConfigError(
+            "validator fields should be passed as separate string args. "  # noqa: Q000
+            "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`"
+        )
+
+    if whole is not None:
+        warnings.warn(
+            'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
+            DeprecationWarning,
+        )
+        assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
+        each_item = not whole
+
+    def dec(f: AnyCallable) -> "AnyClassMethod":
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls,
+            VALIDATOR_CONFIG_KEY,
+            (
+                fields,
+                Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
+            ),
+        )
+        return f_cls
+
+    return dec
+
+
+@overload
+def root_validator(_func: AnyCallable) -> "AnyClassMethod":
+    ...
+
+
+@overload
+def root_validator(
+    *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Callable[[AnyCallable], "AnyClassMethod"]:
+    ...
+
+
+def root_validator(
+    _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
+) -> Union["AnyClassMethod", Callable[[AnyCallable], "AnyClassMethod"]]:
+    """
+    Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
+    before or after standard model parsing/validation is performed.
+    """
+    if _func:
+        f_cls = _prepare_validator(_func, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    def dec(f: AnyCallable) -> "AnyClassMethod":
+        f_cls = _prepare_validator(f, allow_reuse)
+        setattr(
+            f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
+        )
+        return f_cls
+
+    return dec
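These two decorators are what model code built on the vendored copy would use. A minimal sketch, again assuming the `vendor.pydantic` import path; the `Credentials` model here is illustrative only, not the actual cdffs class:

```py
# Minimal sketch; model and field names are hypothetical.
from vendor.pydantic.class_validators import root_validator, validator
from vendor.pydantic.main import BaseModel


class Credentials(BaseModel):
    client_id: str
    client_secret: str

    @validator("client_id")
    def client_id_not_blank(cls, v: str) -> str:
        # runs after the standard str validation, for this one field
        if not v.strip():
            raise ValueError("client_id must not be blank")
        return v

    @root_validator
    def check_pair(cls, values: dict) -> dict:
        # runs once over all parsed values
        if bool(values.get("client_id")) != bool(values.get("client_secret")):
            raise ValueError("client_id and client_secret must be provided together")
        return values


print(Credentials(client_id="abc", client_secret="s3cr3t"))
```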
+
+
+def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> "AnyClassMethod":
+    """
+    Avoid validators with duplicated names since without this, validators can be overwritten silently
+    which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False.
+    """
+    f_cls = function if isinstance(function, classmethod) else classmethod(function)
+    if not in_ipython() and not allow_reuse:
+        ref = (
+            getattr(f_cls.__func__, "__module__", "<No __module__>")
+            + "."
+            + getattr(f_cls.__func__, "__qualname__", f"<No __qualname__: id:{id(f_cls.__func__)}>")
+        )
+        if ref in _FUNCS:
+            raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
+        _FUNCS.add(ref)
+    return f_cls
+
+
+class ValidatorGroup:
+    def __init__(self, validators: "ValidatorListDict") -> None:
+        self.validators = validators
+        self.used_validators = {"*"}
+
+    def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
+        self.used_validators.add(name)
+        validators = self.validators.get(name, [])
+        if name != ROOT_KEY:
+            validators += self.validators.get("*", [])
+        if validators:
+            return {getattr(v.func, "__name__", f"<No __name__: id:{id(v.func)}>"): v for v in validators}
+        else:
+            return None
+
+    def check_for_unused(self) -> None:
+        unused_validators = set(
+            chain.from_iterable(
+                (
+                    getattr(v.func, "__name__", f"<No __name__: id:{id(v.func)}>")
+                    for v in self.validators[f]
+                    if v.check_fields
+                )
+                for f in (self.validators.keys() - self.used_validators)
+            )
+        )
+        if unused_validators:
+            fn = ", ".join(unused_validators)
+            raise ConfigError(
+                f"Validators defined with incorrect fields: {fn} "  # noqa: Q000
+                f"(use check_fields=False if you're inheriting from the model and intended this)"
+            )
+
+
+def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]:
+    validators: Dict[str, List[Validator]] = {}
+    for var_name, value in namespace.items():
+        validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            fields, v = validator_config
+            for field in fields:
+                if field in validators:
+                    validators[field].append(v)
+                else:
+                    validators[field] = [v]
+    return validators
+
+
+def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]:
+    from inspect import signature
+
+    pre_validators: List[AnyCallable] = []
+    post_validators: List[Tuple[bool, AnyCallable]] = []
+    for name, value in namespace.items():
+        validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None)
+        if validator_config:
+            sig = signature(validator_config.func)
+            args = list(sig.parameters.keys())
+            if args[0] == "self":
+                raise ConfigError(
+                    f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, '
+                    f"should be: (cls, values)."
+                )
+            if len(args) != 2:
+                raise ConfigError(f"Invalid signature for root validator {name}: {sig}, should be: (cls, values).")
+            # check function signature
+            if validator_config.pre:
+                pre_validators.append(validator_config.func)
+            else:
+                post_validators.append((validator_config.skip_on_failure, validator_config.func))
+    return pre_validators, post_validators
+
+
+def inherit_validators(base_validators: "ValidatorListDict", validators: "ValidatorListDict") -> "ValidatorListDict":
+    for field, field_validators in base_validators.items():
+        if field not in validators:
+            validators[field] = []
+        validators[field] += field_validators
+    return validators
+
+
+def make_generic_validator(validator: AnyCallable) -> "ValidatorCallable":
+    """
+    Make a generic function which calls a validator with the right arguments.
+
+    Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow,
+    hence this laborious way of doing things.
+
+    It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
+    the arguments "values", "fields" and/or "config" are permitted.
+ """ + from inspect import signature + + if not isinstance(validator, (partial, partialmethod)): + # This should be the default case, so overhead is reduced + sig = signature(validator) + args = list(sig.parameters.keys()) + else: + # Fix the generated argument lists of partial methods + sig = signature(validator.func) + args = [ + k + for k in signature(validator.func).parameters.keys() + if k not in validator.args | validator.keywords.keys() + ] + + first_arg = args.pop(0) + if first_arg == "self": + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + elif first_arg == "cls": + # assume the second argument is value + return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) + else: + # assume the first argument was value which has already been removed + return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) + + +def prep_validators(v_funcs: Iterable[AnyCallable]) -> "ValidatorsList": + return [make_generic_validator(f) for f in v_funcs if f] + + +all_kwargs = {"values", "field", "config"} + + +def _generic_validator_cls(validator: AnyCallable, sig: "Signature", args: Set[str]) -> "ValidatorCallable": + # assume the first argument is value + has_kwargs = False + if "kwargs" in args: + has_kwargs = True + args -= {"kwargs"} + + if not args.issubset(all_kwargs): + raise ConfigError( + f"Invalid signature for validator {validator}: {sig}, should be: " + f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(cls, v) + elif args == {"values"}: + return lambda cls, v, values, field, config: validator(cls, v, values=values) + elif args == {"field"}: + return lambda cls, v, values, field, config: validator(cls, v, field=field) + elif args == {"config"}: + return lambda cls, v, values, field, config: validator(cls, v, config=config) + elif args == {"values", "field"}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) + elif args == {"values", "config"}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) + elif args == {"field", "config"}: + return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + + +def _generic_validator_basic(validator: AnyCallable, sig: "Signature", args: Set[str]) -> "ValidatorCallable": + has_kwargs = False + if "kwargs" in args: + has_kwargs = True + args -= {"kwargs"} + + if not args.issubset(all_kwargs): + raise ConfigError( + f"Invalid signature for validator {validator}: {sig}, should be: " + f'(value, values, config, field), "values", "config" and "field" are all optional.' 
+ ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(v) + elif args == {"values"}: + return lambda cls, v, values, field, config: validator(v, values=values) + elif args == {"field"}: + return lambda cls, v, values, field, config: validator(v, field=field) + elif args == {"config"}: + return lambda cls, v, values, field, config: validator(v, config=config) + elif args == {"values", "field"}: + return lambda cls, v, values, field, config: validator(v, values=values, field=field) + elif args == {"values", "config"}: + return lambda cls, v, values, field, config: validator(v, values=values, config=config) + elif args == {"field", "config"}: + return lambda cls, v, values, field, config: validator(v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + + +def gather_all_validators(type_: "ModelOrDc") -> Dict[str, "AnyClassMethod"]: + all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) # type: ignore[arg-type,var-annotated] + return { + k: v + for k, v in all_attributes.items() + if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY) + } diff --git a/vendor/pydantic/color.py b/vendor/pydantic/color.py new file mode 100644 index 0000000..b214bbf --- /dev/null +++ b/vendor/pydantic/color.py @@ -0,0 +1,494 @@ +""" +Color definitions are used as per CSS3 specification: +http://www.w3.org/TR/css3-color/#svg-color + +A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`. + +In these cases the LAST color when sorted alphabetically takes preferences, +eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua". +""" +import math +import re +from colorsys import hls_to_rgb, rgb_to_hls +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast + +from .errors import ColorError +from .utils import Representation, almost_equal_floats + +if TYPE_CHECKING: + from .typing import CallableGenerator, ReprArgs + +ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] +ColorType = Union[ColorTuple, str] +HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]] + + +class RGBA: + """ + Internal use only as a representation of a color. + """ + + __slots__ = "r", "g", "b", "alpha", "_tuple" + + def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): + self.r = r + self.g = g + self.b = b + self.alpha = alpha + + self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) + + def __getitem__(self, item: Any) -> Any: + return self._tuple[item] + + +# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached +r_hex_short = r"\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*" +r_hex_long = r"\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*" +_r_255 = r"(\d{1,3}(?:\.\d+)?)" +_r_comma = r"\s*,\s*" +r_rgb = rf"\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*" +_r_alpha = r"(\d(?:\.\d+)?|\.\d+|\d{1,2}%)" +r_rgba = rf"\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*" +_r_h = r"(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?" 
+_r_sl = r"(\d{1,3}(?:\.\d+)?)%" +r_hsl = rf"\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*" +r_hsla = rf"\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*" + +# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used +repeat_colors = {int(c * 2, 16) for c in "0123456789abcdef"} +rads = 2 * math.pi + + +class Color(Representation): + __slots__ = "_original", "_rgba" + + def __init__(self, value: ColorType) -> None: + self._rgba: RGBA + self._original: ColorType + if isinstance(value, (tuple, list)): + self._rgba = parse_tuple(value) + elif isinstance(value, str): + self._rgba = parse_str(value) + elif isinstance(value, Color): + self._rgba = value._rgba + value = value._original + else: + raise ColorError(reason="value must be a tuple, list or string") + + # if we've got here value must be a valid color + self._original = value + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="color") + + def original(self) -> ColorType: + """ + Original value passed to Color + """ + return self._original + + def as_named(self, *, fallback: bool = False) -> str: + if self._rgba.alpha is None: + rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) + try: + return COLORS_BY_VALUE[rgb] + except KeyError as e: + if fallback: + return self.as_hex() + else: + raise ValueError("no named color found, use fallback=True, as_hex() or as_rgb()") from e + else: + return self.as_hex() + + def as_hex(self) -> str: + """ + Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string + a "short" representation of the color is possible and whether there's an alpha channel. + """ + values = [float_to_255(c) for c in self._rgba[:3]] + if self._rgba.alpha is not None: + values.append(float_to_255(self._rgba.alpha)) + + as_hex = "".join(f"{v:02x}" for v in values) + if all(c in repeat_colors for c in values): + as_hex = "".join(as_hex[c] for c in range(0, len(as_hex), 2)) + return "#" + as_hex + + def as_rgb(self) -> str: + """ + Color as an rgb(, , ) or rgba(, , , ) string. + """ + if self._rgba.alpha is None: + return f"rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})" + else: + return ( + f"rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, " + f"{round(self._alpha_float(), 2)})" + ) + + def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: + """ + Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is + in the range 0 to 1. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + r, g, b = (float_to_255(c) for c in self._rgba[:3]) + if alpha is None: + if self._rgba.alpha is None: + return r, g, b + else: + return r, g, b, self._alpha_float() + elif alpha: + return r, g, b, self._alpha_float() + else: + # alpha is False + return r, g, b + + def as_hsl(self) -> str: + """ + Color as an hsl(, , ) or hsl(, , , ) string. 
+ """ + if self._rgba.alpha is None: + h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore + return f"hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})" + else: + h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore + return f"hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})" + + def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: + """ + Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in + the range 0 to 1. + + NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) + if alpha is None: + if self._rgba.alpha is None: + return h, s, l + else: + return h, s, l, self._alpha_float() + if alpha: + return h, s, l, self._alpha_float() + else: + # alpha is False + return h, s, l + + def _alpha_float(self) -> float: + return 1 if self._rgba.alpha is None else self._rgba.alpha + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls + + def __str__(self) -> str: + return self.as_named(fallback=True) + + def __repr_args__(self) -> "ReprArgs": + return [(None, self.as_named(fallback=True))] + [("rgb", self.as_rgb_tuple())] # type: ignore + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() + + def __hash__(self) -> int: + return hash(self.as_rgb_tuple()) + + +def parse_tuple(value: Tuple[Any, ...]) -> RGBA: + """ + Parse a tuple or list as a color. + """ + if len(value) == 3: + r, g, b = (parse_color_value(v) for v in value) + return RGBA(r, g, b, None) + elif len(value) == 4: + r, g, b = (parse_color_value(v) for v in value[:3]) + return RGBA(r, g, b, parse_float_alpha(value[3])) + else: + raise ColorError(reason="tuples must have length 3 or 4") + + +def parse_str(value: str) -> RGBA: + """ + Parse a string to an RGBA tuple, trying the following formats (in this order): + * named color, see COLORS_BY_NAME below + * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) + * hex long eg. 
+
+
+def parse_str(value: str) -> RGBA:
+    """
+    Parse a string to an RGBA tuple, trying the following formats (in this order):
+    * named color, see COLORS_BY_NAME below
+    * hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
+    * hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
+    * `rgb(<r>, <g>, <b>) `
+    * `rgba(<r>, <g>, <b>, <a>)`
+    """
+    value_lower = value.lower()
+    try:
+        r, g, b = COLORS_BY_NAME[value_lower]
+    except KeyError:
+        pass
+    else:
+        return ints_to_rgba(r, g, b, None)
+
+    m = re.fullmatch(r_hex_short, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v * 2, 16) for v in rgb)
+        if a:
+            alpha: Optional[float] = int(a * 2, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_hex_long, value_lower)
+    if m:
+        *rgb, a = m.groups()
+        r, g, b = (int(v, 16) for v in rgb)
+        if a:
+            alpha = int(a, 16) / 255
+        else:
+            alpha = None
+        return ints_to_rgba(r, g, b, alpha)
+
+    m = re.fullmatch(r_rgb, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups(), None)  # type: ignore
+
+    m = re.fullmatch(r_rgba, value_lower)
+    if m:
+        return ints_to_rgba(*m.groups())  # type: ignore
+
+    m = re.fullmatch(r_hsl, value_lower)
+    if m:
+        h, h_units, s, l_ = m.groups()
+        return parse_hsl(h, h_units, s, l_)
+
+    m = re.fullmatch(r_hsla, value_lower)
+    if m:
+        h, h_units, s, l_, a = m.groups()
+        return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
+
+    raise ColorError(reason="string not recognised as a valid color")
+
+
+def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
+    return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
+
+
+def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
+    """
+    Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number
+    in the range 0 to 1
+    """
+    try:
+        color = float(value)
+    except ValueError:
+        raise ColorError(reason="color values must be a valid number")
+    if 0 <= color <= max_val:
+        return color / max_val
+    else:
+        raise ColorError(reason=f"color values must be in the range 0 to {max_val}")
+
+
+def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
+    """
+    Parse a value checking it's a valid float in the range 0 to 1
+    """
+    if value is None:
+        return None
+    try:
+        if isinstance(value, str) and value.endswith("%"):
+            alpha = float(value[:-1]) / 100
+        else:
+            alpha = float(value)
+    except ValueError:
+        raise ColorError(reason="alpha values must be a valid float")
+
+    if almost_equal_floats(alpha, 1):
+        return None
+    elif 0 <= alpha <= 1:
+        return alpha
+    else:
+        raise ColorError(reason="alpha values must be in the range 0 to 1")
+
+
+def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
+    """
+    Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
+ """ + s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) + + h_value = float(h) + if h_units in {None, "deg"}: + h_value = h_value % 360 / 360 + elif h_units == "rad": + h_value = h_value % rads / rads + else: + # turns + h_value = h_value % 1 + + r, g, b = hls_to_rgb(h_value, l_value, s_value) + return RGBA(r, g, b, alpha) + + +def float_to_255(c: float) -> int: + return int(round(c * 255)) + + +COLORS_BY_NAME = { + "aliceblue": (240, 248, 255), + "antiquewhite": (250, 235, 215), + "aqua": (0, 255, 255), + "aquamarine": (127, 255, 212), + "azure": (240, 255, 255), + "beige": (245, 245, 220), + "bisque": (255, 228, 196), + "black": (0, 0, 0), + "blanchedalmond": (255, 235, 205), + "blue": (0, 0, 255), + "blueviolet": (138, 43, 226), + "brown": (165, 42, 42), + "burlywood": (222, 184, 135), + "cadetblue": (95, 158, 160), + "chartreuse": (127, 255, 0), + "chocolate": (210, 105, 30), + "coral": (255, 127, 80), + "cornflowerblue": (100, 149, 237), + "cornsilk": (255, 248, 220), + "crimson": (220, 20, 60), + "cyan": (0, 255, 255), + "darkblue": (0, 0, 139), + "darkcyan": (0, 139, 139), + "darkgoldenrod": (184, 134, 11), + "darkgray": (169, 169, 169), + "darkgreen": (0, 100, 0), + "darkgrey": (169, 169, 169), + "darkkhaki": (189, 183, 107), + "darkmagenta": (139, 0, 139), + "darkolivegreen": (85, 107, 47), + "darkorange": (255, 140, 0), + "darkorchid": (153, 50, 204), + "darkred": (139, 0, 0), + "darksalmon": (233, 150, 122), + "darkseagreen": (143, 188, 143), + "darkslateblue": (72, 61, 139), + "darkslategray": (47, 79, 79), + "darkslategrey": (47, 79, 79), + "darkturquoise": (0, 206, 209), + "darkviolet": (148, 0, 211), + "deeppink": (255, 20, 147), + "deepskyblue": (0, 191, 255), + "dimgray": (105, 105, 105), + "dimgrey": (105, 105, 105), + "dodgerblue": (30, 144, 255), + "firebrick": (178, 34, 34), + "floralwhite": (255, 250, 240), + "forestgreen": (34, 139, 34), + "fuchsia": (255, 0, 255), + "gainsboro": (220, 220, 220), + "ghostwhite": (248, 248, 255), + "gold": (255, 215, 0), + "goldenrod": (218, 165, 32), + "gray": (128, 128, 128), + "green": (0, 128, 0), + "greenyellow": (173, 255, 47), + "grey": (128, 128, 128), + "honeydew": (240, 255, 240), + "hotpink": (255, 105, 180), + "indianred": (205, 92, 92), + "indigo": (75, 0, 130), + "ivory": (255, 255, 240), + "khaki": (240, 230, 140), + "lavender": (230, 230, 250), + "lavenderblush": (255, 240, 245), + "lawngreen": (124, 252, 0), + "lemonchiffon": (255, 250, 205), + "lightblue": (173, 216, 230), + "lightcoral": (240, 128, 128), + "lightcyan": (224, 255, 255), + "lightgoldenrodyellow": (250, 250, 210), + "lightgray": (211, 211, 211), + "lightgreen": (144, 238, 144), + "lightgrey": (211, 211, 211), + "lightpink": (255, 182, 193), + "lightsalmon": (255, 160, 122), + "lightseagreen": (32, 178, 170), + "lightskyblue": (135, 206, 250), + "lightslategray": (119, 136, 153), + "lightslategrey": (119, 136, 153), + "lightsteelblue": (176, 196, 222), + "lightyellow": (255, 255, 224), + "lime": (0, 255, 0), + "limegreen": (50, 205, 50), + "linen": (250, 240, 230), + "magenta": (255, 0, 255), + "maroon": (128, 0, 0), + "mediumaquamarine": (102, 205, 170), + "mediumblue": (0, 0, 205), + "mediumorchid": (186, 85, 211), + "mediumpurple": (147, 112, 219), + "mediumseagreen": (60, 179, 113), + "mediumslateblue": (123, 104, 238), + "mediumspringgreen": (0, 250, 154), + "mediumturquoise": (72, 209, 204), + "mediumvioletred": (199, 21, 133), + "midnightblue": (25, 25, 112), + "mintcream": (245, 255, 250), + "mistyrose": (255, 228, 
225), + "moccasin": (255, 228, 181), + "navajowhite": (255, 222, 173), + "navy": (0, 0, 128), + "oldlace": (253, 245, 230), + "olive": (128, 128, 0), + "olivedrab": (107, 142, 35), + "orange": (255, 165, 0), + "orangered": (255, 69, 0), + "orchid": (218, 112, 214), + "palegoldenrod": (238, 232, 170), + "palegreen": (152, 251, 152), + "paleturquoise": (175, 238, 238), + "palevioletred": (219, 112, 147), + "papayawhip": (255, 239, 213), + "peachpuff": (255, 218, 185), + "peru": (205, 133, 63), + "pink": (255, 192, 203), + "plum": (221, 160, 221), + "powderblue": (176, 224, 230), + "purple": (128, 0, 128), + "red": (255, 0, 0), + "rosybrown": (188, 143, 143), + "royalblue": (65, 105, 225), + "saddlebrown": (139, 69, 19), + "salmon": (250, 128, 114), + "sandybrown": (244, 164, 96), + "seagreen": (46, 139, 87), + "seashell": (255, 245, 238), + "sienna": (160, 82, 45), + "silver": (192, 192, 192), + "skyblue": (135, 206, 235), + "slateblue": (106, 90, 205), + "slategray": (112, 128, 144), + "slategrey": (112, 128, 144), + "snow": (255, 250, 250), + "springgreen": (0, 255, 127), + "steelblue": (70, 130, 180), + "tan": (210, 180, 140), + "teal": (0, 128, 128), + "thistle": (216, 191, 216), + "tomato": (255, 99, 71), + "turquoise": (64, 224, 208), + "violet": (238, 130, 238), + "wheat": (245, 222, 179), + "white": (255, 255, 255), + "whitesmoke": (245, 245, 245), + "yellow": (255, 255, 0), + "yellowgreen": (154, 205, 50), +} + +COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} diff --git a/vendor/pydantic/config.py b/vendor/pydantic/config.py new file mode 100644 index 0000000..689d759 --- /dev/null +++ b/vendor/pydantic/config.py @@ -0,0 +1,190 @@ +import json +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union + +from typing_extensions import Literal, Protocol + +from .typing import AnyArgTCallable, AnyCallable +from .utils import GetterDict +from .version import compiled + +if TYPE_CHECKING: + from typing import overload + + from .fields import ModelField + from .main import BaseModel + + ConfigType = Type["BaseConfig"] + + class SchemaExtraCallable(Protocol): + @overload + def __call__(self, schema: Dict[str, Any]) -> None: + pass + + @overload + def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: + pass + +else: + SchemaExtraCallable = Callable[..., None] + +__all__ = "BaseConfig", "ConfigDict", "get_config", "Extra", "inherit_config", "prepare_config" + + +class Extra(str, Enum): + allow = "allow" + ignore = "ignore" + forbid = "forbid" + + +# https://github.com/cython/cython/issues/4003 +# Fixed in Cython 3 and Pydantic v1 won't support Cython 3. +# Pydantic v2 doesn't depend on Cython at all. +if not compiled: + from typing_extensions import TypedDict + + class ConfigDict(TypedDict, total=False): + title: Optional[str] + anystr_lower: bool + anystr_strip_whitespace: bool + min_anystr_length: int + max_anystr_length: Optional[int] + validate_all: bool + extra: Extra + allow_mutation: bool + frozen: bool + allow_population_by_field_name: bool + use_enum_values: bool + fields: Dict[str, Union[str, Dict[str, str]]] + validate_assignment: bool + error_msg_templates: Dict[str, str] + arbitrary_types_allowed: bool + orm_mode: bool + getter_dict: Type[GetterDict] + alias_generator: Optional[Callable[[str], str]] + keep_untouched: Tuple[type, ...] 
+ schema_extra: Union[Dict[str, object], "SchemaExtraCallable"] + json_loads: Callable[[str], object] + json_dumps: AnyArgTCallable[str] + json_encoders: Dict[Type[object], AnyCallable] + underscore_attrs_are_private: bool + allow_inf_nan: bool + copy_on_model_validation: Literal["none", "deep", "shallow"] + # whether dataclass `__post_init__` should be run after validation + post_init_call: Literal["before_validation", "after_validation"] + +else: + ConfigDict = dict # type: ignore + + +class BaseConfig: + title: Optional[str] = None + anystr_lower: bool = False + anystr_upper: bool = False + anystr_strip_whitespace: bool = False + min_anystr_length: int = 0 + max_anystr_length: Optional[int] = None + validate_all: bool = False + extra: Extra = Extra.ignore + allow_mutation: bool = True + frozen: bool = False + allow_population_by_field_name: bool = False + use_enum_values: bool = False + fields: Dict[str, Union[str, Dict[str, str]]] = {} + validate_assignment: bool = False + error_msg_templates: Dict[str, str] = {} + arbitrary_types_allowed: bool = False + orm_mode: bool = False + getter_dict: Type[GetterDict] = GetterDict + alias_generator: Optional[Callable[[str], str]] = None + keep_untouched: Tuple[type, ...] = () + schema_extra: Union[Dict[str, Any], "SchemaExtraCallable"] = {} + json_loads: Callable[[str], Any] = json.loads + json_dumps: Callable[..., str] = json.dumps + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} + underscore_attrs_are_private: bool = False + allow_inf_nan: bool = True + + # whether inherited models as fields should be reconstructed as base model, + # and whether such a copy should be shallow or deep + copy_on_model_validation: Literal["none", "deep", "shallow"] = "shallow" + + # whether `Union` should check all allowed types before even trying to coerce + smart_union: bool = False + # whether dataclass `__post_init__` should be run before or after validation + post_init_call: Literal["before_validation", "after_validation"] = "before_validation" + + @classmethod + def get_field_info(cls, name: str) -> Dict[str, Any]: + """ + Get properties of FieldInfo from the `fields` property of the config class. + """ + + fields_value = cls.fields.get(name) + + if isinstance(fields_value, str): + field_info: Dict[str, Any] = {"alias": fields_value} + elif isinstance(fields_value, dict): + field_info = fields_value + else: + field_info = {} + + if "alias" in field_info: + field_info.setdefault("alias_priority", 2) + + if field_info.get("alias_priority", 0) <= 1 and cls.alias_generator: + alias = cls.alias_generator(name) + if not isinstance(alias, str): + raise TypeError(f"Config.alias_generator must return str, not {alias.__class__}") + field_info.update(alias=alias, alias_priority=1) + return field_info + + @classmethod + def prepare_field(cls, field: "ModelField") -> None: + """ + Optional hook to check or modify fields during model creation. + """ + + +def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: + if config is None: + return BaseConfig + + else: + config_dict = ( + config + if isinstance(config, dict) + else {k: getattr(config, k) for k in dir(config) if not k.startswith("__")} + ) + + class Config(BaseConfig): + ... + + for k, v in config_dict.items(): + setattr(Config, k, v) + return Config + + +def inherit_config(self_config: "ConfigType", parent_config: "ConfigType", **namespace: Any) -> "ConfigType": + if not self_config: + base_classes: Tuple["ConfigType", ...] 
= (parent_config,) + elif self_config == parent_config: + base_classes = (self_config,) + else: + base_classes = self_config, parent_config + + namespace["json_encoders"] = { + **getattr(parent_config, "json_encoders", {}), + **getattr(self_config, "json_encoders", {}), + **namespace.get("json_encoders", {}), + } + + return type("Config", base_classes, namespace) + + +def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: + if not isinstance(config.extra, Extra): + try: + config.extra = Extra(config.extra) + except ValueError: + raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') diff --git a/vendor/pydantic/dataclasses.py b/vendor/pydantic/dataclasses.py new file mode 100644 index 0000000..c6f1af0 --- /dev/null +++ b/vendor/pydantic/dataclasses.py @@ -0,0 +1,500 @@ +""" +The main purpose is to enhance stdlib dataclasses by adding validation +A pydantic dataclass can be generated from scratch or from a stdlib one. + +Behind the scene, a pydantic dataclass is just like a regular one on which we attach +a `BaseModel` and magic methods to trigger the validation of the data. +`__init__` and `__post_init__` are hence overridden and have extra logic to be +able to validate input data. + +When a pydantic dataclass is generated from scratch, it's just a plain dataclass +with validation triggered at initialization + +The tricky part if for stdlib dataclasses that are converted after into pydantic ones e.g. + +```py +@dataclasses.dataclass +class M: + x: int + +ValidatedM = pydantic.dataclasses.dataclass(M) +``` + +We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one! + +```py +assert isinstance(ValidatedM(x=1), M) +assert ValidatedM(x=1) == M(x=1) +``` + +This means we **don't want to create a new dataclass that inherits from it** +The trick is to create a wrapper around `M` that will act as a proxy to trigger +validation without altering default `M` behaviour. 
+""" +import copy +import dataclasses +import sys +from contextlib import contextmanager +from functools import wraps + +try: + from functools import cached_property +except ImportError: + # cached_property available only for python3.8+ + pass + +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload + +from typing_extensions import dataclass_transform + +from .class_validators import gather_all_validators +from .config import BaseConfig, ConfigDict, Extra, get_config +from .error_wrappers import ValidationError +from .errors import DataclassTypeError +from .fields import Field, FieldInfo, Required, Undefined +from .main import create_model, validate_model +from .utils import ClassAttribute + +if TYPE_CHECKING: + from .main import BaseModel + from .typing import CallableGenerator, NoArgAnyCallable + + DataclassT = TypeVar("DataclassT", bound="Dataclass") + + DataclassClassOrWrapper = Union[Type["Dataclass"], "DataclassProxy"] + + class Dataclass: + # stdlib attributes + __dataclass_fields__: ClassVar[Dict[str, Any]] + __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` + __post_init__: ClassVar[Callable[..., None]] + + # Added by pydantic + __pydantic_run_validation__: ClassVar[bool] + __post_init_post_parse__: ClassVar[Callable[..., None]] + __pydantic_initialised__: ClassVar[bool] + __pydantic_model__: ClassVar[Type[BaseModel]] + __pydantic_validate_values__: ClassVar[Callable[["Dataclass"], None]] + __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value + + def __init__(self, *args: object, **kwargs: object) -> None: + pass + + @classmethod + def __get_validators__(cls: Type["Dataclass"]) -> "CallableGenerator": + pass + + @classmethod + def __validate__(cls: Type["DataclassT"], v: Any) -> "DataclassT": + pass + + +__all__ = [ + "dataclass", + "set_validation", + "create_pydantic_model_from_dataclass", + "is_builtin_dataclass", + "make_dataclass_validator", +] + +_T = TypeVar("_T") + +if sys.version_info >= (3, 10): + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = ..., + ) -> Callable[[Type[_T]], "DataclassClassOrWrapper"]: + ... + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = ..., + ) -> "DataclassClassOrWrapper": + ... + +else: + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + ) -> Callable[[Type[_T]], "DataclassClassOrWrapper"]: + ... 
+ + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + ) -> "DataclassClassOrWrapper": + ... + + +@dataclass_transform(field_specifiers=(dataclasses.field, Field)) +def dataclass( + _cls: Optional[Type[_T]] = None, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = False, +) -> Union[Callable[[Type[_T]], "DataclassClassOrWrapper"], "DataclassClassOrWrapper"]: + """ + Like the python standard lib dataclasses but with type validation. + The result is either a pydantic dataclass that will validate input data + or a wrapper that will trigger validation around a stdlib dataclass + to avoid modifying it directly + """ + the_config = get_config(config) + + def wrap(cls: Type[Any]) -> "DataclassClassOrWrapper": + should_use_proxy = ( + use_proxy + if use_proxy is not None + else ( + is_builtin_dataclass(cls) + and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0]))) + ) + ) + if should_use_proxy: + dc_cls_doc = "" + dc_cls = DataclassProxy(cls) + default_validate_on_init = False + else: + dc_cls_doc = cls.__doc__ or "" # needs to be done before generating dataclass + if sys.version_info >= (3, 10): + dc_cls = dataclasses.dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + kw_only=kw_only, + ) + else: + dc_cls = dataclasses.dataclass( # type: ignore + cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen + ) + default_validate_on_init = True + + should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init + _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) + dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) + return dc_cls + + if _cls is None: + return wrap + + return wrap(_cls) + + +@contextmanager +def set_validation(cls: Type["DataclassT"], value: bool) -> Generator[Type["DataclassT"], None, None]: + original_run_validation = cls.__pydantic_run_validation__ + try: + cls.__pydantic_run_validation__ = value + yield cls + finally: + cls.__pydantic_run_validation__ = original_run_validation + + +class DataclassProxy: + __slots__ = "__dataclass__" + + def __init__(self, dc_cls: Type["Dataclass"]) -> None: + object.__setattr__(self, "__dataclass__", dc_cls) + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + with set_validation(self.__dataclass__, True): + return self.__dataclass__(*args, **kwargs) + + def __getattr__(self, name: str) -> Any: + return getattr(self.__dataclass__, name) + + def __setattr__(self, __name: str, __value: Any) -> None: + return setattr(self.__dataclass__, __name, __value) + + def __instancecheck__(self, instance: Any) -> bool: + return isinstance(instance, self.__dataclass__) + + def __copy__(self) -> "DataclassProxy": + return DataclassProxy(copy.copy(self.__dataclass__)) + + def __deepcopy__(self, memo: Any) -> "DataclassProxy": + return 
DataclassProxy(copy.deepcopy(self.__dataclass__, memo)) + + +def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) + dc_cls: Type["Dataclass"], + config: Type[BaseConfig], + validate_on_init: bool, + dc_cls_doc: str, +) -> None: + """ + We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass + it won't even exist (code is generated on the fly by `dataclasses`) + By default, we run validation after `__init__` or `__post_init__` if defined + """ + init = dc_cls.__init__ + + @wraps(init) + def handle_extra_init(self: "Dataclass", *args: Any, **kwargs: Any) -> None: + if config.extra == Extra.ignore: + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + elif config.extra == Extra.allow: + for k, v in kwargs.items(): + self.__dict__.setdefault(k, v) + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + else: + init(self, *args, **kwargs) + + if hasattr(dc_cls, "__post_init__"): + try: + post_init = dc_cls.__post_init__.__wrapped__ # type: ignore[attr-defined] + except AttributeError: + post_init = dc_cls.__post_init__ + + @wraps(post_init) + def new_post_init(self: "Dataclass", *args: Any, **kwargs: Any) -> None: + if config.post_init_call == "before_validation": + post_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + if hasattr(self, "__post_init_post_parse__"): + self.__post_init_post_parse__(*args, **kwargs) + + if config.post_init_call == "after_validation": + post_init(self, *args, **kwargs) + + setattr(dc_cls, "__init__", handle_extra_init) + setattr(dc_cls, "__post_init__", new_post_init) + + else: + + @wraps(init) + def new_init(self: "Dataclass", *args: Any, **kwargs: Any) -> None: + handle_extra_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + + if hasattr(self, "__post_init_post_parse__"): + # We need to find again the initvars. 
To do that we use `__dataclass_fields__` instead of + # public method `dataclasses.fields` + + # get all initvars and their default values + initvars_and_values: Dict[str, Any] = {} + for i, f in enumerate(self.__class__.__dataclass_fields__.values()): + if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] + try: + # set arg value by default + initvars_and_values[f.name] = args[i] + except IndexError: + initvars_and_values[f.name] = kwargs.get(f.name, f.default) + + self.__post_init_post_parse__(**initvars_and_values) + + setattr(dc_cls, "__init__", new_init) + + setattr(dc_cls, "__pydantic_run_validation__", ClassAttribute("__pydantic_run_validation__", validate_on_init)) + setattr(dc_cls, "__pydantic_initialised__", False) + setattr(dc_cls, "__pydantic_model__", create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) + setattr(dc_cls, "__pydantic_validate_values__", _dataclass_validate_values) + setattr(dc_cls, "__validate__", classmethod(_validate_dataclass)) + setattr(dc_cls, "__get_validators__", classmethod(_get_validators)) + + if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: + setattr(dc_cls, "__setattr__", _dataclass_validate_assignment_setattr) + + +def _get_validators(cls: "DataclassClassOrWrapper") -> "CallableGenerator": + yield cls.__validate__ + + +def _validate_dataclass(cls: Type["DataclassT"], v: Any) -> "DataclassT": + with set_validation(cls, True): + if isinstance(v, cls): + v.__pydantic_validate_values__() + return v + elif isinstance(v, (list, tuple)): + return cls(*v) + elif isinstance(v, dict): + return cls(**v) + else: + raise DataclassTypeError(class_name=cls.__name__) + + +def create_pydantic_model_from_dataclass( + dc_cls: Type["Dataclass"], + config: Type[Any] = BaseConfig, + dc_cls_doc: Optional[str] = None, +) -> Type["BaseModel"]: + field_definitions: Dict[str, Any] = {} + for field in dataclasses.fields(dc_cls): + default: Any = Undefined + default_factory: Optional["NoArgAnyCallable"] = None + field_info: FieldInfo + + if field.default is not dataclasses.MISSING: + default = field.default + elif field.default_factory is not dataclasses.MISSING: + default_factory = field.default_factory + else: + default = Required + + if isinstance(default, FieldInfo): + field_info = default + dc_cls.__pydantic_has_field_info_default__ = True + else: + field_info = Field(default=default, default_factory=default_factory, **field.metadata) + + field_definitions[field.name] = (field.type, field_info) + + validators = gather_all_validators(dc_cls) + model: Type["BaseModel"] = create_model( + dc_cls.__name__, + __config__=config, + __module__=dc_cls.__module__, + __validators__=validators, + __cls_kwargs__={"__resolve_forward_refs__": False}, + **field_definitions, + ) + model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or "" + return model + + +if sys.version_info >= (3, 8): + + def _is_field_cached_property(obj: "Dataclass", k: str) -> bool: + return isinstance(getattr(type(obj), k, None), cached_property) + +else: + + def _is_field_cached_property(obj: "Dataclass", k: str) -> bool: + return False + + +def _dataclass_validate_values(self: "Dataclass") -> None: + # validation errors can occur if this function is called twice on an already initialised dataclass. 
+    # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property
+    if getattr(self, "__pydantic_initialised__"):
+        return
+    if getattr(self, "__pydantic_has_field_info_default__", False):
+        # We need to remove `FieldInfo` values since they are not valid as input
+        # It's ok to do that because they are obviously the default values!
+        input_data = {
+            k: v
+            for k, v in self.__dict__.items()
+            if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k))
+        }
+    else:
+        input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)}
+    d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__)
+    if validation_error:
+        raise validation_error
+    self.__dict__.update(d)
+    object.__setattr__(self, "__pydantic_initialised__", True)
+
+
+def _dataclass_validate_assignment_setattr(self: "Dataclass", name: str, value: Any) -> None:
+    if self.__pydantic_initialised__:
+        d = dict(self.__dict__)
+        d.pop(name, None)
+        known_field = self.__pydantic_model__.__fields__.get(name, None)
+        if known_field:
+            value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__)
+            if error_:
+                raise ValidationError([error_], self.__class__)
+
+    object.__setattr__(self, name, value)
+
+
+def is_builtin_dataclass(_cls: Type[Any]) -> bool:
+    """
+    Whether a class is a stdlib dataclass
+    (useful to distinguish a pydantic dataclass that is actually a wrapper around a stdlib dataclass)
+
+    we check that
+    - `_cls` is a dataclass
+    - `_cls` is not a processed pydantic dataclass (with a basemodel attached)
+    - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass
+    e.g.
+    ```
+    @dataclasses.dataclass
+    class A:
+        x: int
+
+    @pydantic.dataclasses.dataclass
+    class B(A):
+        y: int
+    ```
+    In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
+    which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')
+    """
+    return (
+        dataclasses.is_dataclass(_cls)
+        and not hasattr(_cls, "__pydantic_model__")
+        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, "__annotations__", {})))
+    )
+
+
+def make_dataclass_validator(dc_cls: Type["Dataclass"], config: Type[BaseConfig]) -> "CallableGenerator":
+    """
+    Create a pydantic.dataclass from a builtin dataclass to add type validation
+    and yield the validators.
+    It retrieves the parameters of the dataclass and forwards them to the newly created dataclass.
+    """
+    yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True))
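The proxy behaviour described in the module docstring above can be exercised as follows; a minimal sketch, assuming the vendored modules are importable under the `vendor.pydantic` path introduced by this patch (the `Point` class is illustrative):

```py
import dataclasses

from vendor.pydantic.dataclasses import dataclass as pydantic_dataclass
from vendor.pydantic.error_wrappers import ValidationError


@dataclasses.dataclass
class Point:  # a plain stdlib dataclass
    x: int
    y: int = 0


# Wraps Point in a DataclassProxy rather than subclassing it
ValidatedPoint = pydantic_dataclass(Point)

p = ValidatedPoint(x="1")  # "1" is coerced to int(1) by validation
assert isinstance(p, Point) and p == Point(x=1)

try:
    ValidatedPoint(x="not an int")
except ValidationError as exc:
    print(exc)  # 1 validation error for Point ...
```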
diff --git a/vendor/pydantic/datetime_parse.py b/vendor/pydantic/datetime_parse.py
new file mode 100644
index 0000000..055941f
--- /dev/null
+++ b/vendor/pydantic/datetime_parse.py
@@ -0,0 +1,248 @@
+"""
+Functions to parse datetime objects.
+
+We're using regular expressions rather than time.strptime because:
+- They provide both validation and parsing.
+- They're more flexible for datetimes.
+- The date/datetime/time constructors produce friendlier error messages.
+
+Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at
+9718fa2e8abe430c3526a9278dd976443d4ae3c6
+
+Changed to:
+* use standard python datetime types not django.utils.timezone
+* raise ValueError when regex doesn't match rather than returning None
+* support parsing unix timestamps for dates and datetimes
+"""
+import re
+from datetime import date, datetime, time, timedelta, timezone
+from typing import Dict, Optional, Type, Union
+
+from . import errors
+
+date_expr = r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
+time_expr = (
+    r"(?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
+    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
+    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
+)
+
+date_re = re.compile(f"{date_expr}$")
+time_re = re.compile(time_expr)
+datetime_re = re.compile(f"{date_expr}[T ]{time_expr}")
+
+standard_duration_re = re.compile(
+    r"^"
+    r"(?:(?P<days>-?\d+) (days?, )?)?"
+    r"((?:(?P<hours>-?\d+):)(?=\d+:\d+))?"
+    r"(?:(?P<minutes>-?\d+):)?"
+    r"(?P<seconds>-?\d+)"
+    r"(?:\.(?P<microseconds>\d{1,6})\d{0,6})?"
+    r"$"
+)
+
+# Support the sections of ISO 8601 date representation that are accepted by timedelta
+iso8601_duration_re = re.compile(
+    r"^(?P<sign>[-+]?)"
+    r"P"
+    r"(?:(?P<days>\d+(.\d+)?)D)?"
+    r"(?:T"
+    r"(?:(?P<hours>\d+(.\d+)?)H)?"
+    r"(?:(?P<minutes>\d+(.\d+)?)M)?"
+    r"(?:(?P<seconds>\d+(.\d+)?)S)?"
+    r")?"
+    r"$"
+)
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+StrBytesIntFloat = Union[str, bytes, int, float]
+
+
+def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f"invalid type; expected {native_expected_type}, string, bytes, int or float")
+
+
+def from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]:
+    if value == "Z":
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == "-":
+            offset = -offset
+        try:
+            return timezone(timedelta(minutes=offset))
+        except ValueError:
+            raise error()
+    else:
+        return None
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = get_numeric(value, "date")
+    if number is not None:
+        return from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = date_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateError()
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise errors.DateError()
+
+
+def parse_time(value: Union[time, StrBytesIntFloat]) -> time:
+    """
+    Parse a time/string and return a datetime.time.
+
+    Raise ValueError if the input is well formatted but not a valid time.
+    Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
+    """
+    if isinstance(value, time):
+        return value
+
+    number = get_numeric(value, "time")
+    if number is not None:
+        if number >= 86400:
+            # doesn't make sense since the time would loop back around to 0
+            raise errors.TimeError()
+        return (datetime.min + timedelta(seconds=number)).time()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = time_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.TimeError()
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"), errors.TimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    try:
+        return time(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.TimeError()
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = get_numeric(value, "datetime")
+    if number is not None:
+        return from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = datetime_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateTimeError()
+
+    kw = match.groupdict()
+    if kw["microsecond"]:
+        kw["microsecond"] = kw["microsecond"].ljust(6, "0")
+
+    tzinfo = _parse_timezone(kw.pop("tzinfo"), errors.DateTimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_["tzinfo"] = tzinfo
+
+    try:
+        return datetime(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.DateTimeError()
+
+
+def parse_duration(value: StrBytesIntFloat) -> timedelta:
+    """
+    Parse a duration int/float/string and return a datetime.timedelta.
+
+    The preferred format for durations in Django is '%d %H:%M:%S.%f'.
+
+    Also supports ISO 8601 representation.
+    """
+    if isinstance(value, timedelta):
+        return value
+
+    if isinstance(value, (int, float)):
+        # below code requires a string
+        value = f"{value:f}"
+    elif isinstance(value, bytes):
+        value = value.decode()
+
+    try:
+        match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
+    except TypeError:
+        raise TypeError("invalid type; expected timedelta, string, bytes, int or float")
+
+    if not match:
+        raise errors.DurationError()
+
+    kw = match.groupdict()
+    sign = -1 if kw.pop("sign", "+") == "-" else 1
+    if kw.get("microseconds"):
+        kw["microseconds"] = kw["microseconds"].ljust(6, "0")
+
+    if kw.get("seconds") and kw.get("microseconds") and kw["seconds"].startswith("-"):
+        kw["microseconds"] = "-" + kw["microseconds"]
+
+    kw_ = {k: float(v) for k, v in kw.items() if v is not None}
+
+    return sign * timedelta(**kw_)
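For reference, a short sketch of how these parsing helpers behave, assuming the vendored module is importable as `vendor.pydantic.datetime_parse`:

```py
from datetime import datetime, timedelta, timezone

from vendor.pydantic.datetime_parse import parse_datetime, parse_duration

# An ISO 8601 string with an offset yields an aware datetime with a fixed-offset timezone
dt = parse_datetime("2024-03-18T17:38:33+01:00")
assert dt.tzinfo is not None

# Unix timestamps are accepted; values above MS_WATERSHED are interpreted as milliseconds
assert parse_datetime(0) == datetime(1970, 1, 1, tzinfo=timezone.utc)

# Both the Django-style duration format and ISO 8601 durations are supported
assert parse_duration("1 2:03:04") == timedelta(days=1, hours=2, minutes=3, seconds=4)
assert parse_duration("P1DT2H") == timedelta(days=1, hours=2)
```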
diff --git a/vendor/pydantic/env_settings.py b/vendor/pydantic/env_settings.py
new file mode 100644
index 0000000..110a75b
--- /dev/null
+++ b/vendor/pydantic/env_settings.py
@@ -0,0 +1,350 @@
+import os
+import warnings
+from pathlib import Path
+from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union
+
+from .config import BaseConfig, Extra
+from .fields import ModelField
+from .main import BaseModel
+from .types import JsonWrapper
+from .typing import StrPath, display_as_type, get_origin, is_union
+from .utils import deep_update, lenient_issubclass, path_type, sequence_like
+
+env_file_sentinel = str(object())
+
+SettingsSourceCallable = Callable[["BaseSettings"], Dict[str, Any]]
+DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]]
+
+
+class SettingsError(ValueError):
+    pass
+
+
+class BaseSettings(BaseModel):
+    """
+    Base class for settings, allowing values to be overridden by environment variables.
+
+    This is useful in production for secrets you do not wish to save in code; it plays nicely with docker(-compose),
+    Heroku and any 12 factor app design.
+    """
+
+    def __init__(
+        __pydantic_self__,
+        _env_file: Optional[DotenvType] = env_file_sentinel,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+        **values: Any,
+    ) -> None:
+        # Uses something other than `self` for the first arg to allow "self" as a settable attribute
+        super().__init__(
+            **__pydantic_self__._build_values(
+                values,
+                _env_file=_env_file,
+                _env_file_encoding=_env_file_encoding,
+                _env_nested_delimiter=_env_nested_delimiter,
+                _secrets_dir=_secrets_dir,
+            )
+        )
+
+    def _build_values(
+        self,
+        init_kwargs: Dict[str, Any],
+        _env_file: Optional[DotenvType] = None,
+        _env_file_encoding: Optional[str] = None,
+        _env_nested_delimiter: Optional[str] = None,
+        _secrets_dir: Optional[StrPath] = None,
+    ) -> Dict[str, Any]:
+        # Configure built-in sources
+        init_settings = InitSettingsSource(init_kwargs=init_kwargs)
+        env_settings = EnvSettingsSource(
+            env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file),
+            env_file_encoding=(
+                _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding
+            ),
+            env_nested_delimiter=(
+                _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter
+            ),
+            env_prefix_len=len(self.__config__.env_prefix),
+        )
+        file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir)
+        # Provide a hook to set built-in sources priority and add / remove sources
+        sources = self.__config__.customise_sources(
+            init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings
+        )
+        if sources:
+            return deep_update(*reversed([source(self) for source in sources]))
+        else:
+            # no one should mean to do this, but I think returning an empty dict is marginally preferable
+            # to an informative error and much better than a confusing error
+            return {}
+
+    class Config(BaseConfig):
+        env_prefix: str = ""
+        env_file: Optional[DotenvType] = None
+        env_file_encoding: Optional[str] = None
+        env_nested_delimiter: Optional[str] = None
+        secrets_dir: Optional[StrPath] = None
+        validate_all: bool = True
+        extra: Extra = Extra.forbid
+        arbitrary_types_allowed: bool = True
+        case_sensitive: bool = False
+
+        @classmethod
+        def prepare_field(cls, field: ModelField) -> None:
+            env_names: Union[List[str], AbstractSet[str]]
+            field_info_from_config = cls.get_field_info(field.name)
+
+            env = field_info_from_config.get("env") or field.field_info.extra.get("env")
+            if env is None:
+                if field.has_alias:
+                    warnings.warn(
+                        "aliases are no longer used by BaseSettings to define which environment variables to read. "
+                        'Instead use the "env" field setting. 
' + "See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names", + FutureWarning, + ) + env_names = {cls.env_prefix + field.name} + elif isinstance(env, str): + env_names = {env} + elif isinstance(env, (set, frozenset)): + env_names = env + elif sequence_like(env): + env_names = list(env) + else: + raise TypeError(f"invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set") + + if not cls.case_sensitive: + env_names = env_names.__class__(n.lower() for n in env_names) + field.field_info.extra["env_names"] = env_names + + @classmethod + def customise_sources( + cls, + init_settings: SettingsSourceCallable, + env_settings: SettingsSourceCallable, + file_secret_settings: SettingsSourceCallable, + ) -> Tuple[SettingsSourceCallable, ...]: + return init_settings, env_settings, file_secret_settings + + @classmethod + def parse_env_var(cls, field_name: str, raw_val: str) -> Any: + return cls.json_loads(raw_val) + + # populated by the metaclass using the Config class defined above, annotated here to help IDEs only + __config__: ClassVar[Type[Config]] + + +class InitSettingsSource: + __slots__ = ("init_kwargs",) + + def __init__(self, init_kwargs: Dict[str, Any]): + self.init_kwargs = init_kwargs + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + return self.init_kwargs + + def __repr__(self) -> str: + return f"InitSettingsSource(init_kwargs={self.init_kwargs!r})" + + +class EnvSettingsSource: + __slots__ = ("env_file", "env_file_encoding", "env_nested_delimiter", "env_prefix_len") + + def __init__( + self, + env_file: Optional[DotenvType], + env_file_encoding: Optional[str], + env_nested_delimiter: Optional[str] = None, + env_prefix_len: int = 0, + ): + self.env_file: Optional[DotenvType] = env_file + self.env_file_encoding: Optional[str] = env_file_encoding + self.env_nested_delimiter: Optional[str] = env_nested_delimiter + self.env_prefix_len: int = env_prefix_len + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 + """ + Build environment variables suitable for passing to the Model. 
+ """ + d: Dict[str, Any] = {} + + if settings.__config__.case_sensitive: + env_vars: Mapping[str, Optional[str]] = os.environ + else: + env_vars = {k.lower(): v for k, v in os.environ.items()} + + dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) + if dotenv_vars: + env_vars = {**dotenv_vars, **env_vars} + + for field in settings.__fields__.values(): + env_val: Optional[str] = None + for env_name in field.field_info.extra["env_names"]: + env_val = env_vars.get(env_name) + if env_val is not None: + break + + is_complex, allow_parse_failure = self.field_is_complex(field) + if is_complex: + if env_val is None: + # field is complex but no value found so far, try explode_env_vars + env_val_built = self.explode_env_vars(field, env_vars) + if env_val_built: + d[field.alias] = env_val_built + else: + # field is complex and there's a value, decode that as JSON, then add explode_env_vars + try: + env_val = settings.__config__.parse_env_var(field.name, env_val) + except ValueError as e: + if not allow_parse_failure: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + if isinstance(env_val, dict): + d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) + else: + d[field.alias] = env_val + elif env_val is not None: + # simplest case, field is not complex, we only need to add the value if it was found + d[field.alias] = env_val + + return d + + def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: + env_files = self.env_file + if env_files is None: + return {} + + if isinstance(env_files, (str, os.PathLike)): + env_files = [env_files] + + dotenv_vars = {} + for env_file in env_files: + env_path = Path(env_file).expanduser() + if env_path.is_file(): + dotenv_vars.update( + read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) + ) + + return dotenv_vars + + def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: + """ + Find out if a field is complex, and if so whether JSON errors should be ignored + """ + if lenient_issubclass(field.annotation, JsonWrapper): + return False, False + + if field.is_complex(): + allow_parse_failure = False + elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): + allow_parse_failure = True + else: + return False, False + + return True, allow_parse_failure + + def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: + """ + Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. + + This is applied to a single field, hence filtering by env_var prefix. 
+        """
+        prefixes = [f"{env_name}{self.env_nested_delimiter}" for env_name in field.field_info.extra["env_names"]]
+        result: Dict[str, Any] = {}
+        for env_name, env_val in env_vars.items():
+            if not any(env_name.startswith(prefix) for prefix in prefixes):
+                continue
+            # we remove the prefix before splitting in case the prefix has characters in common with the delimiter
+            env_name_without_prefix = env_name[self.env_prefix_len :]
+            _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter)
+            env_var = result
+            for key in keys:
+                env_var = env_var.setdefault(key, {})
+            env_var[last_key] = env_val
+
+        return result
+
+    def __repr__(self) -> str:
+        return (
+            f"EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, "
+            f"env_nested_delimiter={self.env_nested_delimiter!r})"
+        )
+
+
+class SecretsSettingsSource:
+    __slots__ = ("secrets_dir",)
+
+    def __init__(self, secrets_dir: Optional[StrPath]):
+        self.secrets_dir: Optional[StrPath] = secrets_dir
+
+    def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
+        """
+        Build fields from "secrets" files.
+        """
+        secrets: Dict[str, Optional[str]] = {}
+
+        if self.secrets_dir is None:
+            return secrets
+
+        secrets_path = Path(self.secrets_dir).expanduser()
+
+        if not secrets_path.exists():
+            warnings.warn(f'directory "{secrets_path}" does not exist')
+            return secrets
+
+        if not secrets_path.is_dir():
+            raise SettingsError(f"secrets_dir must reference a directory, not a {path_type(secrets_path)}")
+
+        for field in settings.__fields__.values():
+            for env_name in field.field_info.extra["env_names"]:
+                path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive)
+                if not path:
+                    # path does not exist, we currently don't return a warning for this
+                    continue
+
+                if path.is_file():
+                    secret_value = path.read_text().strip()
+                    if field.is_complex():
+                        try:
+                            secret_value = settings.__config__.parse_env_var(field.name, secret_value)
+                        except ValueError as e:
+                            raise SettingsError(f'error parsing env var "{env_name}"') from e
+
+                    secrets[field.alias] = secret_value
+                else:
+                    warnings.warn(
+                        f'attempted to load secret file "{path}" but found a {path_type(path)} instead.',
+                        stacklevel=4,
+                    )
+        return secrets
+
+    def __repr__(self) -> str:
+        return f"SecretsSettingsSource(secrets_dir={self.secrets_dir!r})"
+
+
+def read_env_file(
+    file_path: StrPath, *, encoding: Optional[str] = None, case_sensitive: bool = False
+) -> Dict[str, Optional[str]]:
+    try:
+        from dotenv import dotenv_values
+    except ImportError as e:
+        raise ImportError("python-dotenv is not installed, run `pip install pydantic[dotenv]`") from e
+
+    file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or "utf8")
+    if not case_sensitive:
+        return {k.lower(): v for k, v in file_vars.items()}
+    else:
+        return file_vars
+
+
+def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]:
+    """
+    Find a file within path's directory matching filename, optionally ignoring case.
+    """
+    for f in dir_path.iterdir():
+        if f.name == file_name:
+            return f
+        elif not case_sensitive and f.name.lower() == file_name.lower():
+            return f
+    return None
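A minimal usage sketch for the settings machinery above, assuming the vendored module is importable as `vendor.pydantic.env_settings`; the `AppSettings` class and `APP_*` variable names are illustrative:

```py
import os

from vendor.pydantic.env_settings import BaseSettings


class AppSettings(BaseSettings):  # illustrative settings model
    api_url: str = "https://example.com"
    timeout: int = 10

    class Config:
        env_prefix = "APP_"  # fields are read from APP_API_URL, APP_TIMEOUT, ...


os.environ["APP_TIMEOUT"] = "30"
settings = AppSettings()
assert settings.timeout == 30  # overridden by the environment and coerced to int
assert settings.api_url == "https://example.com"  # default kept, no APP_API_URL set
```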
diff --git a/vendor/pydantic/error_wrappers.py b/vendor/pydantic/error_wrappers.py
new file mode 100644
index 0000000..c2eb8ef
--- /dev/null
+++ b/vendor/pydantic/error_wrappers.py
@@ -0,0 +1,161 @@
+import json
+from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union
+
+from .json import pydantic_encoder
+from .utils import Representation
+
+if TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from .config import BaseConfig
+    from .types import ModelOrDc
+    from .typing import ReprArgs
+
+    Loc = Tuple[Union[int, str], ...]
+
+    class _ErrorDictRequired(TypedDict):
+        loc: Loc
+        msg: str
+        type: str
+
+    class ErrorDict(_ErrorDictRequired, total=False):
+        ctx: Dict[str, Any]
+
+
+__all__ = "ErrorWrapper", "ValidationError"
+
+
+class ErrorWrapper(Representation):
+    __slots__ = "exc", "_loc"
+
+    def __init__(self, exc: Exception, loc: Union[str, "Loc"]) -> None:
+        self.exc = exc
+        self._loc = loc
+
+    def loc_tuple(self) -> "Loc":
+        if isinstance(self._loc, tuple):
+            return self._loc
+        else:
+            return (self._loc,)
+
+    def __repr_args__(self) -> "ReprArgs":
+        return [("exc", self.exc), ("loc", self.loc_tuple())]
+
+
+# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper]
+# but recursive, therefore just use:
+ErrorList = Union[Sequence[Any], ErrorWrapper]
+
+
+class ValidationError(Representation, ValueError):
+    __slots__ = "raw_errors", "model", "_error_cache"
+
+    def __init__(self, errors: Sequence[ErrorList], model: "ModelOrDc") -> None:
+        self.raw_errors = errors
+        self.model = model
+        self._error_cache: Optional[List["ErrorDict"]] = None
+
+    def errors(self) -> List["ErrorDict"]:
+        if self._error_cache is None:
+            try:
+                config = self.model.__config__  # type: ignore
+            except AttributeError:
+                config = self.model.__pydantic_model__.__config__  # type: ignore
+            self._error_cache = list(flatten_errors(self.raw_errors, config))
+        return self._error_cache
+
+    def json(self, *, indent: Union[None, int, str] = 2) -> str:
+        return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)
+
+    def __str__(self) -> str:
+        errors = self.errors()
+        no_errors = len(errors)
+        return (
+            f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n'
+            f"{display_errors(errors)}"
+        )
+
+    def __repr_args__(self) -> "ReprArgs":
+        return [("model", self.model.__name__), ("errors", self.errors())]
+
+
+def display_errors(errors: List["ErrorDict"]) -> str:
+    return "\n".join(f'{_display_error_loc(e)}\n  {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors)
+
+
+def _display_error_loc(error: "ErrorDict") -> str:
+    return " -> ".join(str(e) for e in error["loc"])
+
+
+def _display_error_type_and_ctx(error: "ErrorDict") -> str:
+    t = "type=" + error["type"]
+    ctx = error.get("ctx")
+    if ctx:
+        return t + "".join(f"; {k}={v}" for k, v in ctx.items())
+    else:
+        return t
+
+
+def flatten_errors(
+    errors: Sequence[Any], config: Type["BaseConfig"], loc: Optional["Loc"] = None
+) -> Generator["ErrorDict", None, None]:
+    for error in errors:
+        if isinstance(error, ErrorWrapper):
+            if loc:
+                error_loc = loc + error.loc_tuple()
+            else:
+                error_loc = error.loc_tuple()
+
+            if isinstance(error.exc, ValidationError):
+                yield from flatten_errors(error.exc.raw_errors, config, error_loc)
+            else:
+                yield error_dict(error.exc, 
config, error_loc) + elif isinstance(error, list): + yield from flatten_errors(error, config, loc=loc) + else: + raise RuntimeError(f"Unknown error object: {error}") + + +def error_dict(exc: Exception, config: Type["BaseConfig"], loc: "Loc") -> "ErrorDict": + type_ = get_exc_type(exc.__class__) + msg_template = config.error_msg_templates.get(type_) or getattr(exc, "msg_template", None) + ctx = exc.__dict__ + if msg_template: + msg = msg_template.format(**ctx) + else: + msg = str(exc) + + d: "ErrorDict" = {"loc": loc, "msg": msg, "type": type_} + + if ctx: + d["ctx"] = ctx + + return d + + +_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} + + +def get_exc_type(cls: Type[Exception]) -> str: + # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up + try: + return _EXC_TYPE_CACHE[cls] + except KeyError: + r = _get_exc_type(cls) + _EXC_TYPE_CACHE[cls] = r + return r + + +def _get_exc_type(cls: Type[Exception]) -> str: + if issubclass(cls, AssertionError): + return "assertion_error" + + base_name = "type_error" if issubclass(cls, TypeError) else "value_error" + if cls in (TypeError, ValueError): + # just TypeError or ValueError, no extra code + return base_name + + # if it's not a TypeError or ValueError, we just take the lowercase of the exception name + # no chaining or snake case logic, use "code" for more complex error types. + code = getattr(cls, "code", None) or cls.__name__.replace("Error", "").lower() + return base_name + "." + code diff --git a/vendor/pydantic/errors.py b/vendor/pydantic/errors.py new file mode 100644 index 0000000..b9c9ced --- /dev/null +++ b/vendor/pydantic/errors.py @@ -0,0 +1,646 @@ +from decimal import Decimal +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union + +from .typing import display_as_type + +if TYPE_CHECKING: + from .typing import DictStrAny + +# explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc. 
+__all__ = ( + "PydanticTypeError", + "PydanticValueError", + "ConfigError", + "MissingError", + "ExtraError", + "NoneIsNotAllowedError", + "NoneIsAllowedError", + "WrongConstantError", + "NotNoneError", + "BoolError", + "BytesError", + "DictError", + "EmailError", + "UrlError", + "UrlSchemeError", + "UrlSchemePermittedError", + "UrlUserInfoError", + "UrlHostError", + "UrlHostTldError", + "UrlPortError", + "UrlExtraError", + "EnumError", + "IntEnumError", + "EnumMemberError", + "IntegerError", + "FloatError", + "PathError", + "PathNotExistsError", + "PathNotAFileError", + "PathNotADirectoryError", + "PyObjectError", + "SequenceError", + "ListError", + "SetError", + "FrozenSetError", + "TupleError", + "TupleLengthError", + "ListMinLengthError", + "ListMaxLengthError", + "ListUniqueItemsError", + "SetMinLengthError", + "SetMaxLengthError", + "FrozenSetMinLengthError", + "FrozenSetMaxLengthError", + "AnyStrMinLengthError", + "AnyStrMaxLengthError", + "StrError", + "StrRegexError", + "NumberNotGtError", + "NumberNotGeError", + "NumberNotLtError", + "NumberNotLeError", + "NumberNotMultipleError", + "DecimalError", + "DecimalIsNotFiniteError", + "DecimalMaxDigitsError", + "DecimalMaxPlacesError", + "DecimalWholeDigitsError", + "DateTimeError", + "DateError", + "DateNotInThePastError", + "DateNotInTheFutureError", + "TimeError", + "DurationError", + "HashableError", + "UUIDError", + "UUIDVersionError", + "ArbitraryTypeError", + "ClassError", + "SubclassError", + "JsonError", + "JsonTypeError", + "PatternError", + "DataclassTypeError", + "CallableError", + "IPvAnyAddressError", + "IPvAnyInterfaceError", + "IPvAnyNetworkError", + "IPv4AddressError", + "IPv6AddressError", + "IPv4NetworkError", + "IPv6NetworkError", + "IPv4InterfaceError", + "IPv6InterfaceError", + "ColorError", + "StrictBoolError", + "NotDigitError", + "LuhnValidationError", + "InvalidLengthForBrand", + "InvalidByteSize", + "InvalidByteSizeUnit", + "MissingDiscriminator", + "InvalidDiscriminator", +) + + +def cls_kwargs(cls: Type["PydanticErrorMixin"], ctx: "DictStrAny") -> "PydanticErrorMixin": + """ + For built-in exceptions like ValueError or TypeError, we need to implement + __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) + By default pickle protocol 2 calls `cls.__new__(cls, *args)`. + Since we only use kwargs, we need a little constructor to change that. 
+ Note: the callable can't be a lambda as pickle looks in the namespace to find it + """ + return cls(**ctx) + + +class PydanticErrorMixin: + code: str + msg_template: str + + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx + + def __str__(self) -> str: + return self.msg_template.format(**self.__dict__) + + def __reduce__(self) -> Tuple[Callable[..., "PydanticErrorMixin"], Tuple[Type["PydanticErrorMixin"], "DictStrAny"]]: + return cls_kwargs, (self.__class__, self.__dict__) + + +class PydanticTypeError(PydanticErrorMixin, TypeError): + pass + + +class PydanticValueError(PydanticErrorMixin, ValueError): + pass + + +class ConfigError(RuntimeError): + pass + + +class MissingError(PydanticValueError): + msg_template = "field required" + + +class ExtraError(PydanticValueError): + msg_template = "extra fields not permitted" + + +class NoneIsNotAllowedError(PydanticTypeError): + code = "none.not_allowed" + msg_template = "none is not an allowed value" + + +class NoneIsAllowedError(PydanticTypeError): + code = "none.allowed" + msg_template = "value is not none" + + +class WrongConstantError(PydanticValueError): + code = "const" + + def __str__(self) -> str: + permitted = ", ".join(repr(v) for v in self.permitted) # type: ignore + return f"unexpected value; permitted: {permitted}" + + +class NotNoneError(PydanticTypeError): + code = "not_none" + msg_template = "value is not None" + + +class BoolError(PydanticTypeError): + msg_template = "value could not be parsed to a boolean" + + +class BytesError(PydanticTypeError): + msg_template = "byte type expected" + + +class DictError(PydanticTypeError): + msg_template = "value is not a valid dict" + + +class EmailError(PydanticValueError): + msg_template = "value is not a valid email address" + + +class UrlError(PydanticValueError): + code = "url" + + +class UrlSchemeError(UrlError): + code = "url.scheme" + msg_template = "invalid or missing URL scheme" + + +class UrlSchemePermittedError(UrlError): + code = "url.scheme" + msg_template = "URL scheme not permitted" + + def __init__(self, allowed_schemes: Set[str]): + super().__init__(allowed_schemes=allowed_schemes) + + +class UrlUserInfoError(UrlError): + code = "url.userinfo" + msg_template = "userinfo required in URL but missing" + + +class UrlHostError(UrlError): + code = "url.host" + msg_template = "URL host invalid" + + +class UrlHostTldError(UrlError): + code = "url.host" + msg_template = "URL host invalid, top level domain required" + + +class UrlPortError(UrlError): + code = "url.port" + msg_template = "URL port invalid, port cannot exceed 65535" + + +class UrlExtraError(UrlError): + code = "url.extra" + msg_template = "URL invalid, extra characters found after valid URL: {extra!r}" + + +class EnumMemberError(PydanticTypeError): + code = "enum" + + def __str__(self) -> str: + permitted = ", ".join(repr(v.value) for v in self.enum_values) # type: ignore + return f"value is not a valid enumeration member; permitted: {permitted}" + + +class IntegerError(PydanticTypeError): + msg_template = "value is not a valid integer" + + +class FloatError(PydanticTypeError): + msg_template = "value is not a valid float" + + +class PathError(PydanticTypeError): + msg_template = "value is not a valid path" + + +class _PathValueError(PydanticValueError): + def __init__(self, *, path: Path) -> None: + super().__init__(path=str(path)) + + +class PathNotExistsError(_PathValueError): + code = "path.not_exists" + msg_template = 'file or directory at path "{path}" does not exist' + + +class 
PathNotAFileError(_PathValueError): + code = "path.not_a_file" + msg_template = 'path "{path}" does not point to a file' + + +class PathNotADirectoryError(_PathValueError): + code = "path.not_a_directory" + msg_template = 'path "{path}" does not point to a directory' + + +class PyObjectError(PydanticTypeError): + msg_template = "ensure this value contains valid import path or valid callable: {error_message}" + + +class SequenceError(PydanticTypeError): + msg_template = "value is not a valid sequence" + + +class IterableError(PydanticTypeError): + msg_template = "value is not a valid iterable" + + +class ListError(PydanticTypeError): + msg_template = "value is not a valid list" + + +class SetError(PydanticTypeError): + msg_template = "value is not a valid set" + + +class FrozenSetError(PydanticTypeError): + msg_template = "value is not a valid frozenset" + + +class DequeError(PydanticTypeError): + msg_template = "value is not a valid deque" + + +class TupleError(PydanticTypeError): + msg_template = "value is not a valid tuple" + + +class TupleLengthError(PydanticValueError): + code = "tuple.length" + msg_template = "wrong tuple length {actual_length}, expected {expected_length}" + + def __init__(self, *, actual_length: int, expected_length: int) -> None: + super().__init__(actual_length=actual_length, expected_length=expected_length) + + +class ListMinLengthError(PydanticValueError): + code = "list.min_items" + msg_template = "ensure this value has at least {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListMaxLengthError(PydanticValueError): + code = "list.max_items" + msg_template = "ensure this value has at most {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListUniqueItemsError(PydanticValueError): + code = "list.unique_items" + msg_template = "the list has duplicated items" + + +class SetMinLengthError(PydanticValueError): + code = "set.min_items" + msg_template = "ensure this value has at least {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class SetMaxLengthError(PydanticValueError): + code = "set.max_items" + msg_template = "ensure this value has at most {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMinLengthError(PydanticValueError): + code = "frozenset.min_items" + msg_template = "ensure this value has at least {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMaxLengthError(PydanticValueError): + code = "frozenset.max_items" + msg_template = "ensure this value has at most {limit_value} items" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMinLengthError(PydanticValueError): + code = "any_str.min_length" + msg_template = "ensure this value has at least {limit_value} characters" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMaxLengthError(PydanticValueError): + code = "any_str.max_length" + msg_template = "ensure this value has at most {limit_value} characters" + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class StrError(PydanticTypeError): + msg_template = "str type expected" + + 
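These mixin-based error classes also double as the extension point for custom errors: a subclass only needs a `code` and a `msg_template`, and any keyword arguments become template context. A minimal sketch, with a hypothetical error name and an assumed vendored import path:

```py
from vendor.pydantic.errors import PydanticValueError


class NotABarError(PydanticValueError):  # hypothetical custom error
    code = "not_a_bar"
    msg_template = 'value is not "bar", got "{wrong_value}"'


# keyword arguments are stored on the instance and formatted into msg_template
err = NotABarError(wrong_value="baz")
assert str(err) == 'value is not "bar", got "baz"'
```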
+class StrRegexError(PydanticValueError): + code = "str.regex" + msg_template = 'string does not match regex "{pattern}"' + + def __init__(self, *, pattern: str) -> None: + super().__init__(pattern=pattern) + + +class _NumberBoundError(PydanticValueError): + def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: + super().__init__(limit_value=limit_value) + + +class NumberNotGtError(_NumberBoundError): + code = "number.not_gt" + msg_template = "ensure this value is greater than {limit_value}" + + +class NumberNotGeError(_NumberBoundError): + code = "number.not_ge" + msg_template = "ensure this value is greater than or equal to {limit_value}" + + +class NumberNotLtError(_NumberBoundError): + code = "number.not_lt" + msg_template = "ensure this value is less than {limit_value}" + + +class NumberNotLeError(_NumberBoundError): + code = "number.not_le" + msg_template = "ensure this value is less than or equal to {limit_value}" + + +class NumberNotFiniteError(PydanticValueError): + code = "number.not_finite_number" + msg_template = "ensure this value is a finite number" + + +class NumberNotMultipleError(PydanticValueError): + code = "number.not_multiple" + msg_template = "ensure this value is a multiple of {multiple_of}" + + def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: + super().__init__(multiple_of=multiple_of) + + +class DecimalError(PydanticTypeError): + msg_template = "value is not a valid decimal" + + +class DecimalIsNotFiniteError(PydanticValueError): + code = "decimal.not_finite" + msg_template = "value is not a valid decimal" + + +class DecimalMaxDigitsError(PydanticValueError): + code = "decimal.max_digits" + msg_template = "ensure that there are no more than {max_digits} digits in total" + + def __init__(self, *, max_digits: int) -> None: + super().__init__(max_digits=max_digits) + + +class DecimalMaxPlacesError(PydanticValueError): + code = "decimal.max_places" + msg_template = "ensure that there are no more than {decimal_places} decimal places" + + def __init__(self, *, decimal_places: int) -> None: + super().__init__(decimal_places=decimal_places) + + +class DecimalWholeDigitsError(PydanticValueError): + code = "decimal.whole_digits" + msg_template = "ensure that there are no more than {whole_digits} digits before the decimal point" + + def __init__(self, *, whole_digits: int) -> None: + super().__init__(whole_digits=whole_digits) + + +class DateTimeError(PydanticValueError): + msg_template = "invalid datetime format" + + +class DateError(PydanticValueError): + msg_template = "invalid date format" + + +class DateNotInThePastError(PydanticValueError): + code = "date.not_in_the_past" + msg_template = "date is not in the past" + + +class DateNotInTheFutureError(PydanticValueError): + code = "date.not_in_the_future" + msg_template = "date is not in the future" + + +class TimeError(PydanticValueError): + msg_template = "invalid time format" + + +class DurationError(PydanticValueError): + msg_template = "invalid duration format" + + +class HashableError(PydanticTypeError): + msg_template = "value is not a valid hashable" + + +class UUIDError(PydanticTypeError): + msg_template = "value is not a valid uuid" + + +class UUIDVersionError(PydanticValueError): + code = "uuid.version" + msg_template = "uuid version {required_version} expected" + + def __init__(self, *, required_version: int) -> None: + super().__init__(required_version=required_version) + + +class ArbitraryTypeError(PydanticTypeError): + code = "arbitrary_type" + msg_template = 
"instance of {expected_arbitrary_type} expected" + + def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: + super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) + + +class ClassError(PydanticTypeError): + code = "class" + msg_template = "a class is expected" + + +class SubclassError(PydanticTypeError): + code = "subclass" + msg_template = "subclass of {expected_class} expected" + + def __init__(self, *, expected_class: Type[Any]) -> None: + super().__init__(expected_class=display_as_type(expected_class)) + + +class JsonError(PydanticValueError): + msg_template = "Invalid JSON" + + +class JsonTypeError(PydanticTypeError): + code = "json" + msg_template = "JSON object must be str, bytes or bytearray" + + +class PatternError(PydanticValueError): + code = "regex_pattern" + msg_template = "Invalid regular expression" + + +class DataclassTypeError(PydanticTypeError): + code = "dataclass" + msg_template = "instance of {class_name}, tuple or dict expected" + + +class CallableError(PydanticTypeError): + msg_template = "{value} is not callable" + + +class EnumError(PydanticTypeError): + code = "enum_instance" + msg_template = "{value} is not a valid Enum instance" + + +class IntEnumError(PydanticTypeError): + code = "int_enum_instance" + msg_template = "{value} is not a valid IntEnum instance" + + +class IPvAnyAddressError(PydanticValueError): + msg_template = "value is not a valid IPv4 or IPv6 address" + + +class IPvAnyInterfaceError(PydanticValueError): + msg_template = "value is not a valid IPv4 or IPv6 interface" + + +class IPvAnyNetworkError(PydanticValueError): + msg_template = "value is not a valid IPv4 or IPv6 network" + + +class IPv4AddressError(PydanticValueError): + msg_template = "value is not a valid IPv4 address" + + +class IPv6AddressError(PydanticValueError): + msg_template = "value is not a valid IPv6 address" + + +class IPv4NetworkError(PydanticValueError): + msg_template = "value is not a valid IPv4 network" + + +class IPv6NetworkError(PydanticValueError): + msg_template = "value is not a valid IPv6 network" + + +class IPv4InterfaceError(PydanticValueError): + msg_template = "value is not a valid IPv4 interface" + + +class IPv6InterfaceError(PydanticValueError): + msg_template = "value is not a valid IPv6 interface" + + +class ColorError(PydanticValueError): + msg_template = "value is not a valid color: {reason}" + + +class StrictBoolError(PydanticValueError): + msg_template = "value is not a valid boolean" + + +class NotDigitError(PydanticValueError): + code = "payment_card_number.digits" + msg_template = "card number is not all digits" + + +class LuhnValidationError(PydanticValueError): + code = "payment_card_number.luhn_check" + msg_template = "card number is not luhn valid" + + +class InvalidLengthForBrand(PydanticValueError): + code = "payment_card_number.invalid_length_for_brand" + msg_template = "Length for a {brand} card must be {required_length}" + + +class InvalidByteSize(PydanticValueError): + msg_template = "could not parse value and unit from byte string" + + +class InvalidByteSizeUnit(PydanticValueError): + msg_template = "could not interpret byte unit: {unit}" + + +class MissingDiscriminator(PydanticValueError): + code = "discriminated_union.missing_discriminator" + msg_template = "Discriminator {discriminator_key!r} is missing in value" + + +class InvalidDiscriminator(PydanticValueError): + code = "discriminated_union.invalid_discriminator" + msg_template = ( + "No match for discriminator {discriminator_key!r} and value 
{discriminator_value!r} " + "(allowed values: {allowed_values})" + ) + + def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: + super().__init__( + discriminator_key=discriminator_key, + discriminator_value=discriminator_value, + allowed_values=", ".join(map(repr, allowed_values)), + ) diff --git a/vendor/pydantic/fields.py b/vendor/pydantic/fields.py new file mode 100644 index 0000000..85b7054 --- /dev/null +++ b/vendor/pydantic/fields.py @@ -0,0 +1,1256 @@ +import copy +import re +from collections import Counter as CollectionCounter +from collections import defaultdict, deque +from collections.abc import Callable +from collections.abc import Hashable as CollectionsHashable +from collections.abc import Iterable as CollectionsIterable +from typing import ( + TYPE_CHECKING, + Any, + Counter, + DefaultDict, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated, Final + +from . import errors as errors_ +from .class_validators import Validator, make_generic_validator, prep_validators +from .error_wrappers import ErrorWrapper +from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError +from .types import Json, JsonWrapper +from .typing import ( + NoArgAnyCallable, + convert_generics, + display_as_type, + get_args, + get_origin, + is_finalvar, + is_literal_type, + is_new_type, + is_none_type, + is_typeddict, + is_typeddict_special, + is_union, + new_type_supertype, +) +from .utils import ( + PyObjectStr, + Representation, + ValueItems, + get_discriminator_alias_and_values, + get_unique_discriminator_alias, + lenient_isinstance, + lenient_issubclass, + sequence_like, + smart_deepcopy, +) +from .validators import constant_validator, dict_validator, find_validators, validate_json + +Required: Any = Ellipsis + +T = TypeVar("T") + + +class UndefinedType: + def __repr__(self) -> str: + return "PydanticUndefined" + + def __copy__(self: T) -> T: + return self + + def __reduce__(self) -> str: + return "Undefined" + + def __deepcopy__(self: T, _: Any) -> T: + return self + + +Undefined = UndefinedType() + +if TYPE_CHECKING: + from .class_validators import ValidatorsList + from .config import BaseConfig + from .error_wrappers import ErrorList + from .types import ModelOrDc + from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs + + ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] + LocStr = Union[Tuple[Union[int, str], ...], str] + BoolUndefined = Union[bool, UndefinedType] + + +class FieldInfo(Representation): + """ + Captures extra information about a field. 
+ """ + + __slots__ = ( + "default", + "default_factory", + "alias", + "alias_priority", + "title", + "description", + "exclude", + "include", + "const", + "gt", + "ge", + "lt", + "le", + "multiple_of", + "allow_inf_nan", + "max_digits", + "decimal_places", + "min_items", + "max_items", + "unique_items", + "min_length", + "max_length", + "allow_mutation", + "repr", + "regex", + "discriminator", + "extra", + ) + + # field constraints with the default value, it's also used in update_from_config below + __field_constraints__ = { + "min_length": None, + "max_length": None, + "regex": None, + "gt": None, + "lt": None, + "ge": None, + "le": None, + "multiple_of": None, + "allow_inf_nan": None, + "max_digits": None, + "decimal_places": None, + "min_items": None, + "max_items": None, + "unique_items": None, + "allow_mutation": True, + } + + def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: + self.default = default + self.default_factory = kwargs.pop("default_factory", None) + self.alias = kwargs.pop("alias", None) + self.alias_priority = kwargs.pop("alias_priority", 2 if self.alias is not None else None) + self.title = kwargs.pop("title", None) + self.description = kwargs.pop("description", None) + self.exclude = kwargs.pop("exclude", None) + self.include = kwargs.pop("include", None) + self.const = kwargs.pop("const", None) + self.gt = kwargs.pop("gt", None) + self.ge = kwargs.pop("ge", None) + self.lt = kwargs.pop("lt", None) + self.le = kwargs.pop("le", None) + self.multiple_of = kwargs.pop("multiple_of", None) + self.allow_inf_nan = kwargs.pop("allow_inf_nan", None) + self.max_digits = kwargs.pop("max_digits", None) + self.decimal_places = kwargs.pop("decimal_places", None) + self.min_items = kwargs.pop("min_items", None) + self.max_items = kwargs.pop("max_items", None) + self.unique_items = kwargs.pop("unique_items", None) + self.min_length = kwargs.pop("min_length", None) + self.max_length = kwargs.pop("max_length", None) + self.allow_mutation = kwargs.pop("allow_mutation", True) + self.regex = kwargs.pop("regex", None) + self.discriminator = kwargs.pop("discriminator", None) + self.repr = kwargs.pop("repr", True) + self.extra = kwargs + + def __repr_args__(self) -> "ReprArgs": + field_defaults_to_hide: Dict[str, Any] = { + "repr": True, + **self.__field_constraints__, + } + + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] + + def get_constraints(self) -> Set[str]: + """ + Gets the constraints set on the field by comparing the constraint value with its default value + + :return: the constraints set on field_info + """ + return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} + + def update_from_config(self, from_config: Dict[str, Any]) -> None: + """ + Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are dated. + """ + for attr_name, value in from_config.items(): + try: + current_value = getattr(self, attr_name) + except AttributeError: + # attr_name is not an attribute of FieldInfo, it should therefore be added to extra + # (except if extra already has this value!) 
+ self.extra.setdefault(attr_name, value) + else: + if current_value is self.__field_constraints__.get(attr_name, None): + setattr(self, attr_name, value) + elif attr_name == "exclude": + self.exclude = ValueItems.merge(value, current_value) + elif attr_name == "include": + self.include = ValueItems.merge(value, current_value, intersect=True) + + def _validate(self) -> None: + if self.default is not Undefined and self.default_factory is not None: + raise ValueError("cannot specify both default and default_factory") + + +def Field( + default: Any = Undefined, + *, + default_factory: Optional[NoArgAnyCallable] = None, + alias: Optional[str] = None, + title: Optional[str] = None, + description: Optional[str] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny", Any]] = None, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny", Any]] = None, + const: Optional[bool] = None, + gt: Optional[float] = None, + ge: Optional[float] = None, + lt: Optional[float] = None, + le: Optional[float] = None, + multiple_of: Optional[float] = None, + allow_inf_nan: Optional[bool] = None, + max_digits: Optional[int] = None, + decimal_places: Optional[int] = None, + min_items: Optional[int] = None, + max_items: Optional[int] = None, + unique_items: Optional[bool] = None, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + allow_mutation: bool = True, + regex: Optional[str] = None, + discriminator: Optional[str] = None, + repr: bool = True, + **extra: Any, +) -> Any: + """ + Used to provide extra information about a field, either for the model schema or complex validation. Some arguments + apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``. + + :param default: since this is replacing the field’s default, its first argument is used + to set the default, use ellipsis (``...``) to indicate the field is required + :param default_factory: callable that will be called when a default value is needed for this field + If both `default` and `default_factory` are set, an error is raised. + :param alias: the public name of the field + :param title: can be any string, used in the schema + :param description: can be any string, used in the schema + :param exclude: exclude this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param include: include this field while dumping. + Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method. + :param const: this field is required and *must* take it's default value + :param gt: only applies to numbers, requires the field to be "greater than". The schema + will have an ``exclusiveMinimum`` validation keyword + :param ge: only applies to numbers, requires the field to be "greater than or equal to". The + schema will have a ``minimum`` validation keyword + :param lt: only applies to numbers, requires the field to be "less than". The schema + will have an ``exclusiveMaximum`` validation keyword + :param le: only applies to numbers, requires the field to be "less than or equal to". The + schema will have a ``maximum`` validation keyword + :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The + schema will have a ``multipleOf`` validation keyword + :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf), + which is a valid Python float. Default True, set to False for compatibility with JSON. 
+ :param max_digits: only applies to Decimals, requires the field to have a maximum number + of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes. + :param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places + allowed. It does not include trailing decimal zeroes. + :param min_items: only applies to lists, requires the field to have a minimum number of + elements. The schema will have a ``minItems`` validation keyword + :param max_items: only applies to lists, requires the field to have a maximum number of + elements. The schema will have a ``maxItems`` validation keyword + :param unique_items: only applies to lists, requires the field not to have duplicated + elements. The schema will have a ``uniqueItems`` validation keyword + :param min_length: only applies to strings, requires the field to have a minimum length. The + schema will have a ``minLength`` validation keyword + :param max_length: only applies to strings, requires the field to have a maximum length. The + schema will have a ``maxLength`` validation keyword + :param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is + assigned on an instance. The BaseModel Config must set validate_assignment to True + :param regex: only applies to strings, requires the field match against a regular expression + pattern string. The schema will have a ``pattern`` validation keyword + :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field. + The `discriminator` is the name of this common field to shorten validation and improve generated schema + :param repr: show this field in the representation + :param **extra: any additional keyword arguments will be added as is to the schema + """ + field_info = FieldInfo( + default, + default_factory=default_factory, + alias=alias, + title=title, + description=description, + exclude=exclude, + include=include, + const=const, + gt=gt, + ge=ge, + lt=lt, + le=le, + multiple_of=multiple_of, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + min_items=min_items, + max_items=max_items, + unique_items=unique_items, + min_length=min_length, + max_length=max_length, + allow_mutation=allow_mutation, + regex=regex, + discriminator=discriminator, + repr=repr, + **extra, + ) + field_info._validate() + return field_info + + +# used to be an enum but changed to int's for small performance improvement as less access overhead +SHAPE_SINGLETON = 1 +SHAPE_LIST = 2 +SHAPE_SET = 3 +SHAPE_MAPPING = 4 +SHAPE_TUPLE = 5 +SHAPE_TUPLE_ELLIPSIS = 6 +SHAPE_SEQUENCE = 7 +SHAPE_FROZENSET = 8 +SHAPE_ITERABLE = 9 +SHAPE_GENERIC = 10 +SHAPE_DEQUE = 11 +SHAPE_DICT = 12 +SHAPE_DEFAULTDICT = 13 +SHAPE_COUNTER = 14 +SHAPE_NAME_LOOKUP = { + SHAPE_LIST: "List[{}]", + SHAPE_SET: "Set[{}]", + SHAPE_TUPLE_ELLIPSIS: "Tuple[{}, ...]", + SHAPE_SEQUENCE: "Sequence[{}]", + SHAPE_FROZENSET: "FrozenSet[{}]", + SHAPE_ITERABLE: "Iterable[{}]", + SHAPE_DEQUE: "Deque[{}]", + SHAPE_DICT: "Dict[{}]", + SHAPE_DEFAULTDICT: "DefaultDict[{}]", + SHAPE_COUNTER: "Counter[{}]", +} + +MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER} + + +class ModelField(Representation): + __slots__ = ( + "type_", + "outer_type_", + "annotation", + "sub_fields", + "sub_fields_mapping", + "key_field", + "validators", + "pre_validators", + "post_validators", + "default", + 
"default_factory", + "required", + "final", + "model_config", + "name", + "alias", + "has_alias", + "field_info", + "discriminator_key", + "discriminator_alias", + "validate_always", + "allow_none", + "shape", + "class_validators", + "parse_json", + ) + + def __init__( + self, + *, + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]], + model_config: Type["BaseConfig"], + default: Any = None, + default_factory: Optional[NoArgAnyCallable] = None, + required: "BoolUndefined" = Undefined, + final: bool = False, + alias: Optional[str] = None, + field_info: Optional[FieldInfo] = None, + ) -> None: + self.name: str = name + self.has_alias: bool = alias is not None + self.alias: str = alias if alias is not None else name + self.annotation = type_ + self.type_: Any = convert_generics(type_) + self.outer_type_: Any = type_ + self.class_validators = class_validators or {} + self.default: Any = default + self.default_factory: Optional[NoArgAnyCallable] = default_factory + self.required: "BoolUndefined" = required + self.final: bool = final + self.model_config = model_config + self.field_info: FieldInfo = field_info or FieldInfo(default) + self.discriminator_key: Optional[str] = self.field_info.discriminator + self.discriminator_alias: Optional[str] = self.discriminator_key + + self.allow_none: bool = False + self.validate_always: bool = False + self.sub_fields: Optional[List[ModelField]] = None + self.sub_fields_mapping: Optional[Dict[str, "ModelField"]] = None # used for discriminated union + self.key_field: Optional[ModelField] = None + self.validators: "ValidatorsList" = [] + self.pre_validators: Optional["ValidatorsList"] = None + self.post_validators: Optional["ValidatorsList"] = None + self.parse_json: bool = False + self.shape: int = SHAPE_SINGLETON + self.model_config.prepare_field(self) + self.prepare() + + def get_default(self) -> Any: + return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() + + @staticmethod + def _get_field_info( + field_name: str, annotation: Any, value: Any, config: Type["BaseConfig"] + ) -> Tuple[FieldInfo, Any]: + """ + Get a FieldInfo from a root typing.Annotated annotation, value, or config default. + + The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contain + a FieldInfo, a new one will be created using the config. + + :param field_name: name of the field for use in error messages + :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]` + :param value: the field's assigned value + :param config: the model's config object + :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config. 
+ """ + field_info_from_config = config.get_field_info(field_name) + + field_info = None + if get_origin(annotation) is Annotated: + field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)] + if len(field_infos) > 1: + raise ValueError(f"cannot specify multiple `Annotated` `Field`s for {field_name!r}") + field_info = next(iter(field_infos), None) + if field_info is not None: + field_info = copy.copy(field_info) + field_info.update_from_config(field_info_from_config) + if field_info.default not in (Undefined, Required): + raise ValueError(f"`Field` default cannot be set in `Annotated` for {field_name!r}") + if value is not Undefined and value is not Required: + # check also `Required` because of `validate_arguments` that sets `...` as default value + field_info.default = value + + if isinstance(value, FieldInfo): + if field_info is not None: + raise ValueError(f"cannot specify `Annotated` and value `Field`s together for {field_name!r}") + field_info = value + field_info.update_from_config(field_info_from_config) + elif field_info is None: + field_info = FieldInfo(value, **field_info_from_config) + value = None if field_info.default_factory is not None else field_info.default + field_info._validate() + return field_info, value + + @classmethod + def infer( + cls, + *, + name: str, + value: Any, + annotation: Any, + class_validators: Optional[Dict[str, Validator]], + config: Type["BaseConfig"], + ) -> "ModelField": + from .schema import get_annotation_from_field_info + + field_info, value = cls._get_field_info(name, annotation, value, config) + required: "BoolUndefined" = Undefined + if value is Required: + required = True + value = None + elif value is not Undefined: + required = False + annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment) + + return cls( + name=name, + type_=annotation, + alias=field_info.alias, + class_validators=class_validators, + default=value, + default_factory=field_info.default_factory, + required=required, + model_config=config, + field_info=field_info, + ) + + def set_config(self, config: Type["BaseConfig"]) -> None: + self.model_config = config + info_from_config = config.get_field_info(self.name) + config.prepare_field(self) + new_alias = info_from_config.get("alias") + new_alias_priority = info_from_config.get("alias_priority") or 0 + if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0): + self.field_info.alias = new_alias + self.field_info.alias_priority = new_alias_priority + self.alias = new_alias + new_exclude = info_from_config.get("exclude") + if new_exclude is not None: + self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude) + new_include = info_from_config.get("include") + if new_include is not None: + self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True) + + @property + def alt_alias(self) -> bool: + return self.name != self.alias + + def prepare(self) -> None: + """ + Prepare the field but inspecting self.default, self.type_ etc. + + Note: this method is **not** idempotent (because _type_analysis is not idempotent), + e.g. calling it it multiple times may modify the field and configure it incorrectly. 
+ """ + self._set_default_and_type() + if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: + # self.type_ is currently a ForwardRef and there's nothing we can do now, + # user will need to call model.update_forward_refs() + return + + self._type_analysis() + if self.required is Undefined: + self.required = True + if self.default is Undefined and self.default_factory is None: + self.default = None + self.populate_validators() + + def _set_default_and_type(self) -> None: + """ + Set the default value, infer the type if needed and check if `None` value is valid. + """ + if self.default_factory is not None: + if self.type_ is Undefined: + raise errors_.ConfigError( + f"you need to set the type of field {self.name!r} when using `default_factory`" + ) + return + + default_value = self.get_default() + + if default_value is not None and self.type_ is Undefined: + self.type_ = default_value.__class__ + self.outer_type_ = self.type_ + self.annotation = self.type_ + + if self.type_ is Undefined: + raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') + + if self.required is False and default_value is None: + self.allow_none = True + + def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) + # typing interface is horrible, we have to do some ugly checks + if lenient_issubclass(self.type_, JsonWrapper): + self.type_ = self.type_.inner_type + self.parse_json = True + elif lenient_issubclass(self.type_, Json): + self.type_ = Any + self.parse_json = True + elif isinstance(self.type_, TypeVar): + if self.type_.__bound__: + self.type_ = self.type_.__bound__ + elif self.type_.__constraints__: + self.type_ = Union[self.type_.__constraints__] + else: + self.type_ = Any + elif is_new_type(self.type_): + self.type_ = new_type_supertype(self.type_) + + if self.type_ is Any or self.type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + return + elif self.type_ is Pattern or self.type_ is re.Pattern: + # python 3.7 only, Pattern is a typing object but without sub fields + return + elif is_literal_type(self.type_): + return + elif is_typeddict(self.type_): + return + + if is_finalvar(self.type_): + self.final = True + + if self.type_ is Final: + self.type_ = Any + else: + self.type_ = get_args(self.type_)[0] + + self._type_analysis() + return + + origin = get_origin(self.type_) + + if origin is Annotated or is_typeddict_special(origin): + self.type_ = get_args(self.type_)[0] + self._type_analysis() + return + + if self.discriminator_key is not None and not is_union(origin): + raise TypeError("`discriminator` can only be used with `Union` type with more than one variant") + + # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` + if origin is None or origin is CollectionsHashable: + # field is not "typing" object eg. Union, Dict, List etc. + # allow None for virtual superclasses of NoneType, e.g. 
Hashable + if isinstance(self.type_, type) and isinstance(None, self.type_): + self.allow_none = True + return + elif origin is Callable: + return + elif is_union(origin): + types_ = [] + for type_ in get_args(self.type_): + if is_none_type(type_) or type_ is Any or type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + if is_none_type(type_): + continue + types_.append(type_) + + if len(types_) == 1: + # Optional[] + self.type_ = types_[0] + # this is the one case where the "outer type" isn't just the original type + self.outer_type_ = self.type_ + # re-run to correctly interpret the new self.type_ + self._type_analysis() + else: + self.sub_fields = [self._create_sub_type(t, f"{self.name}_{display_as_type(t)}") for t in types_] + + if self.discriminator_key is not None: + self.prepare_discriminated_union_sub_fields() + return + elif issubclass(origin, Tuple): # type: ignore + # origin == Tuple without item type + args = get_args(self.type_) + if not args: # plain tuple + self.type_ = Any + self.shape = SHAPE_TUPLE_ELLIPSIS + elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] + self.type_ = args[0] + self.shape = SHAPE_TUPLE_ELLIPSIS + self.sub_fields = [self._create_sub_type(args[0], f"{self.name}_0")] + elif args == ((),): # Tuple[()] means empty tuple + self.shape = SHAPE_TUPLE + self.type_ = Any + self.sub_fields = [] + else: + self.shape = SHAPE_TUPLE + self.sub_fields = [self._create_sub_type(t, f"{self.name}_{i}") for i, t in enumerate(args)] + return + elif issubclass(origin, List): + # Create self validators + get_validators = getattr(self.type_, "__get_validators__", None) + if get_validators: + self.class_validators.update( + {f"list_{i}": Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_LIST + elif issubclass(origin, Set): + # Create self validators + get_validators = getattr(self.type_, "__get_validators__", None) + if get_validators: + self.class_validators.update( + {f"set_{i}": Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SET + elif issubclass(origin, FrozenSet): + # Create self validators + get_validators = getattr(self.type_, "__get_validators__", None) + if get_validators: + self.class_validators.update( + {f"frozenset_{i}": Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_FROZENSET + elif issubclass(origin, Deque): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_DEQUE + elif issubclass(origin, Sequence): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SEQUENCE + # priority to most common mapping: dict + elif origin is dict or origin is Dict: + self.key_field = self._create_sub_type(get_args(self.type_)[0], "key_" + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DICT + elif issubclass(origin, DefaultDict): + self.key_field = self._create_sub_type(get_args(self.type_)[0], "key_" + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DEFAULTDICT + elif issubclass(origin, Counter): + self.key_field = self._create_sub_type(get_args(self.type_)[0], "key_" + self.name, for_keys=True) + self.type_ = int + self.shape = SHAPE_COUNTER + elif issubclass(origin, Mapping): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 
"key_" + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_MAPPING + # Equality check as almost everything inherits form Iterable, including str + # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other + elif origin in {Iterable, CollectionsIterable}: + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_ITERABLE + self.sub_fields = [self._create_sub_type(self.type_, f"{self.name}_type")] + elif issubclass(origin, Type): # type: ignore + return + elif hasattr(origin, "__get_validators__") or self.model_config.arbitrary_types_allowed: + # Is a Pydantic-compatible generic that handles itself + # or we have arbitrary_types_allowed = True + self.shape = SHAPE_GENERIC + self.sub_fields = [self._create_sub_type(t, f"{self.name}_{i}") for i, t in enumerate(get_args(self.type_))] + self.type_ = origin + return + else: + raise TypeError(f'Fields of type "{origin}" are not supported.') + + # type_ has been refined eg. as the type of a List and sub_fields needs to be populated + self.sub_fields = [self._create_sub_type(self.type_, "_" + self.name)] + + def prepare_discriminated_union_sub_fields(self) -> None: + """ + Prepare the mapping -> and update `sub_fields` + Note that this process can be aborted if a `ForwardRef` is encountered + """ + assert self.discriminator_key is not None + + if self.type_.__class__ is DeferredType: + return + + assert self.sub_fields is not None + sub_fields_mapping: Dict[str, "ModelField"] = {} + all_aliases: Set[str] = set() + + for sub_field in self.sub_fields: + t = sub_field.type_ + if t.__class__ is ForwardRef: + # Stopping everything...will need to call `update_forward_refs` + return + + alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key) + all_aliases.add(alias) + for discriminator_value in discriminator_values: + sub_fields_mapping[discriminator_value] = sub_field + + self.sub_fields_mapping = sub_fields_mapping + self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key) + + def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> "ModelField": + if for_keys: + class_validators = None + else: + # validators for sub items should not have `each_item` as we want to check only the first sublevel + class_validators = { + k: Validator( + func=v.func, + pre=v.pre, + each_item=False, + always=v.always, + check_fields=v.check_fields, + skip_on_failure=v.skip_on_failure, + ) + for k, v in self.class_validators.items() + if v.each_item + } + + field_info, _ = self._get_field_info(name, type_, None, self.model_config) + + return self.__class__( + type_=type_, + name=name, + class_validators=class_validators, + model_config=self.model_config, + field_info=field_info, + ) + + def populate_validators(self) -> None: + """ + Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__ + and class validators. This method should be idempotent, e.g. it should be safe to call multiple times + without mis-configuring the field. 
+ """ + self.validate_always = getattr(self.type_, "validate_always", False) or any( + v.always for v in self.class_validators.values() + ) + + class_validators_ = self.class_validators.values() + if not self.sub_fields or self.shape == SHAPE_GENERIC: + get_validators = getattr(self.type_, "__get_validators__", None) + v_funcs = ( + *[v.func for v in class_validators_ if v.each_item and v.pre], + *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), + *[v.func for v in class_validators_ if v.each_item and not v.pre], + ) + self.validators = prep_validators(v_funcs) + + self.pre_validators = [] + self.post_validators = [] + + if self.field_info and self.field_info.const: + self.post_validators.append(make_generic_validator(constant_validator)) + + if class_validators_: + self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) + self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) + + if self.parse_json: + self.pre_validators.append(make_generic_validator(validate_json)) + + self.pre_validators = self.pre_validators or None + self.post_validators = self.post_validators or None + + def validate( + self, v: Any, values: Dict[str, Any], *, loc: "LocStr", cls: Optional["ModelOrDc"] = None + ) -> "ValidateReturn": + assert self.type_.__class__ is not DeferredType + + if self.type_.__class__ is ForwardRef: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f"you might need to call {cls.__name__}.update_forward_refs()." + ) + + errors: Optional["ErrorList"] + if self.pre_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) + if errors: + return v, errors + + if v is None: + if is_none_type(self.type_): + # keep validating + pass + elif self.allow_none: + if self.post_validators: + return self._apply_validators(v, values, loc, cls, self.post_validators) + else: + return None, None + else: + return v, ErrorWrapper(NoneIsNotAllowedError(), loc) + + if self.shape == SHAPE_SINGLETON: + v, errors = self._validate_singleton(v, values, loc, cls) + elif self.shape in MAPPING_LIKE_SHAPES: + v, errors = self._validate_mapping_like(v, values, loc, cls) + elif self.shape == SHAPE_TUPLE: + v, errors = self._validate_tuple(v, values, loc, cls) + elif self.shape == SHAPE_ITERABLE: + v, errors = self._validate_iterable(v, values, loc, cls) + elif self.shape == SHAPE_GENERIC: + v, errors = self._apply_validators(v, values, loc, cls, self.validators) + else: + # sequence, list, set, generator, tuple with ellipsis, frozen set + v, errors = self._validate_sequence_like(v, values, loc, cls) + + if not errors and self.post_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) + return v, errors + + def _validate_sequence_like( # noqa: C901 (ignore complexity) + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + """ + Validate sequence-like containers: lists, tuples, sets and generators + Note that large if-else blocks are necessary to enable Cython + optimization, which is why we disable the complexity check above. 
+ """ + if not sequence_like(v): + e: errors_.PydanticTypeError + if self.shape == SHAPE_LIST: + e = errors_.ListError() + elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): + e = errors_.TupleError() + elif self.shape == SHAPE_SET: + e = errors_.SetError() + elif self.shape == SHAPE_FROZENSET: + e = errors_.FrozenSetError() + else: + e = errors_.SequenceError() + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, v_ in enumerate(v): + v_loc = *loc, i + r, ee = self._validate_singleton(v_, values, v_loc, cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + + converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result + + if self.shape == SHAPE_SET: + converted = set(result) + elif self.shape == SHAPE_FROZENSET: + converted = frozenset(result) + elif self.shape == SHAPE_TUPLE_ELLIPSIS: + converted = tuple(result) + elif self.shape == SHAPE_DEQUE: + converted = deque(result, maxlen=getattr(v, "maxlen", None)) + elif self.shape == SHAPE_SEQUENCE: + if isinstance(v, tuple): + converted = tuple(result) + elif isinstance(v, set): + converted = set(result) + elif isinstance(v, Generator): + converted = iter(result) + elif isinstance(v, deque): + converted = deque(result, maxlen=getattr(v, "maxlen", None)) + return converted, None + + def _validate_iterable( + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + """ + Validate Iterables. + + This intentionally doesn't validate values to allow infinite generators. + """ + + try: + iterable = iter(v) + except TypeError: + return v, ErrorWrapper(errors_.IterableError(), loc) + return iterable, None + + def _validate_tuple( + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + e: Optional[Exception] = None + if not sequence_like(v): + e = errors_.TupleError() + else: + actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore + if actual_length != expected_length: + e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) + + if e: + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore + v_loc = *loc, i + r, ee = field.validate(v_, values, loc=v_loc, cls=cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + else: + return tuple(result), None + + def _validate_mapping_like( + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + try: + v_iter = dict_validator(v) + except TypeError as exc: + return v, ErrorWrapper(exc, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result, errors = {}, [] + for k, v_ in v_iter.items(): + v_loc = *loc, "__key__" + key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore + if key_errors: + errors.append(key_errors) + continue + + v_loc = *loc, k + value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) + if value_errors: + errors.append(value_errors) + continue + + result[key_result] = value_result + if errors: + return v, errors + elif self.shape == SHAPE_DICT: + return result, None + elif self.shape == SHAPE_DEFAULTDICT: + return defaultdict(self.type_, result), None + 
elif self.shape == SHAPE_COUNTER: + return CollectionCounter(result), None + else: + return self._get_mapping_value(v, result), None + + def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]: + """ + When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid + coercing to `dict` unwillingly. + """ + original_cls = original.__class__ + + if original_cls == dict or original_cls == Dict: + return converted + elif original_cls in {defaultdict, DefaultDict}: + return defaultdict(self.type_, converted) + else: + try: + # Counter, OrderedDict, UserDict, ... + return original_cls(converted) # type: ignore + except TypeError: + raise RuntimeError(f"Could not convert dictionary to {original_cls.__name__!r}") from None + + def _validate_singleton( + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + if self.sub_fields: + if self.discriminator_key is not None: + return self._validate_discriminated_union(v, values, loc, cls) + + errors = [] + + if self.model_config.smart_union and is_union(get_origin(self.type_)): + # 1st pass: check if the value is an exact instance of one of the Union types + # (e.g. to avoid coercing a bool into an int) + for field in self.sub_fields: + if v.__class__ is field.outer_type_: + return v, None + + # 2nd pass: check if the value is an instance of any subclass of the Union types + for field in self.sub_fields: + # This whole logic will be improved later on to support more complex `isinstance` checks + # It will probably be done once a strict mode is added and be something like: + # ``` + # value, error = field.validate(v, values, strict=True) + # if error is None: + # return value, None + # ``` + try: + if isinstance(v, field.outer_type_): + return v, None + except TypeError: + # compound type + if lenient_isinstance(v, get_origin(field.outer_type_)): + value, error = field.validate(v, values, loc=loc, cls=cls) + if not error: + return value, None + + # 1st pass by default or 3rd pass with `smart_union` enabled: + # check if the value can be coerced into one of the Union types + for field in self.sub_fields: + value, error = field.validate(v, values, loc=loc, cls=cls) + if error: + errors.append(error) + else: + return value, None + return v, errors + else: + return self._apply_validators(v, values, loc, cls, self.validators) + + def _validate_discriminated_union( + self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"] + ) -> "ValidateReturn": + assert self.discriminator_key is not None + assert self.discriminator_alias is not None + + try: + try: + discriminator_value = v[self.discriminator_alias] + except KeyError: + if self.model_config.allow_population_by_field_name: + discriminator_value = v[self.discriminator_key] + else: + raise + except KeyError: + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + except TypeError: + try: + # BaseModel or dataclass + discriminator_value = getattr(v, self.discriminator_key) + except (AttributeError, TypeError): + return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc) + + if self.sub_fields_mapping is None: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f"you might need to call {cls.__name__}.update_forward_refs()." 
+            )
+
+        try:
+            sub_field = self.sub_fields_mapping[discriminator_value]
+        except (KeyError, TypeError):
+            # KeyError: `discriminator_value` is not in the dictionary.
+            # TypeError: `discriminator_value` is unhashable.
+            assert self.sub_fields_mapping is not None
+            return v, ErrorWrapper(
+                InvalidDiscriminator(
+                    discriminator_key=self.discriminator_key,
+                    discriminator_value=discriminator_value,
+                    allowed_values=list(self.sub_fields_mapping),
+                ),
+                loc,
+            )
+        else:
+            if not isinstance(loc, tuple):
+                loc = (loc,)
+            return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
+
+    def _apply_validators(
+        self, v: Any, values: Dict[str, Any], loc: "LocStr", cls: Optional["ModelOrDc"], validators: "ValidatorsList"
+    ) -> "ValidateReturn":
+        for validator in validators:
+            try:
+                v = validator(cls, v, values, self, self.model_config)
+            except (ValueError, TypeError, AssertionError) as exc:
+                return v, ErrorWrapper(exc, loc)
+        return v, None
+
+    def is_complex(self) -> bool:
+        """
+        Whether the field is "complex", e.g. env variables should be parsed as JSON.
+        """
+        from .main import BaseModel
+
+        return (
+            self.shape != SHAPE_SINGLETON
+            or hasattr(self.type_, "__pydantic_model__")
+            or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
+        )
+
+    def _type_display(self) -> PyObjectStr:
+        t = display_as_type(self.type_)
+
+        if self.shape in MAPPING_LIKE_SHAPES:
+            t = f"Mapping[{display_as_type(self.key_field.type_)}, {t}]"  # type: ignore
+        elif self.shape == SHAPE_TUPLE:
+            t = "Tuple[{}]".format(", ".join(display_as_type(f.type_) for f in self.sub_fields))  # type: ignore
+        elif self.shape == SHAPE_GENERIC:
+            assert self.sub_fields
+            t = "{}[{}]".format(
+                display_as_type(self.type_), ", ".join(display_as_type(f.type_) for f in self.sub_fields)
+            )
+        elif self.shape != SHAPE_SINGLETON:
+            t = SHAPE_NAME_LOOKUP[self.shape].format(t)
+
+        if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
+            t = f"Optional[{t}]"
+        return PyObjectStr(t)
+
+    def __repr_args__(self) -> "ReprArgs":
+        args = [("name", self.name), ("type", self._type_display()), ("required", self.required)]
+
+        if not self.required:
+            if self.default_factory is not None:
+                args.append(("default_factory", f"<function {self.default_factory.__name__}>"))
+            else:
+                args.append(("default", self.default))
+
+        if self.alt_alias:
+            args.append(("alias", self.alias))
+        return args
+
+
+class ModelPrivateAttr(Representation):
+    __slots__ = ("default", "default_factory")
+
+    def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
+        self.default = default
+        self.default_factory = default_factory
+
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+            other.default,
+            other.default_factory,
+        )
+
+
+def PrivateAttr(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+) -> Any:
+    """
+    Indicates that an attribute is only used internally and never mixed with regular fields.
+
+    Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant.
+
+    Private attrs are stored in model __slots__.
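+
+    A minimal illustrative sketch (``TimeAwareModel`` is hypothetical)::
+
+        from datetime import datetime
+
+        class TimeAwareModel(BaseModel):
+            _processed_at: datetime = PrivateAttr(default_factory=datetime.utcnow)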
+
+    :param default: the attribute's default value
+    :param default_factory: callable that will be called when a default value is needed for this attribute
+        If both `default` and `default_factory` are set, an error is raised.
+    """
+    if default is not Undefined and default_factory is not None:
+        raise ValueError("cannot specify both default and default_factory")
+
+    return ModelPrivateAttr(
+        default,
+        default_factory=default_factory,
+    )
+
+
+class DeferredType:
+    """
+    Used to postpone field preparation, while creating recursive generic models.
+    """
+
+
+def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool:
+    return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo)
diff --git a/vendor/pydantic/json.py b/vendor/pydantic/json.py
new file mode 100644
index 0000000..7004966
--- /dev/null
+++ b/vendor/pydantic/json.py
@@ -0,0 +1,112 @@
+import datetime
+from collections import deque
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from re import Pattern
+from types import GeneratorType
+from typing import Any, Callable, Dict, Type, Union
+from uuid import UUID
+
+from .color import Color
+from .networks import NameEmail
+from .types import SecretBytes, SecretStr
+
+__all__ = "pydantic_encoder", "custom_pydantic_encoder", "timedelta_isoformat"
+
+
+def isoformat(o: Union[datetime.date, datetime.time]) -> str:
+    return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
+    """
+    Encodes a Decimal as int if there's no exponent, otherwise float
+
+    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
+    where an integer (but not int typed) is used. Encoding this as a float
+    results in failed round-tripping between encode and parse.
+    Our Id type is a prime example of this.
+ + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +def pydantic_encoder(obj: Any) -> Any: + from dataclasses import asdict, is_dataclass + + from .main import BaseModel + + if isinstance(obj, BaseModel): + return obj.dict() + elif is_dataclass(obj): + return asdict(obj) + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") + + +def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = type_encoders[base] + except KeyError: + continue + + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + return pydantic_encoder(obj) + + +def timedelta_isoformat(td: datetime.timedelta) -> str: + """ + ISO 8601 encoding for Python timedelta object. 
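+
+    >>> timedelta_isoformat(datetime.timedelta(hours=1, minutes=30))
+    'P0DT1H30M0.000000S'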
+ """ + minutes, seconds = divmod(td.seconds, 60) + hours, minutes = divmod(minutes, 60) + return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/vendor/pydantic/main.py b/vendor/pydantic/main.py new file mode 100644 index 0000000..c810f51 --- /dev/null +++ b/vendor/pydantic/main.py @@ -0,0 +1,1107 @@ +import warnings +from abc import ABCMeta +from copy import deepcopy +from enum import Enum +from functools import partial +from pathlib import Path +from types import FunctionType, prepare_class, resolve_bases +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + no_type_check, + overload, +) + +from typing_extensions import dataclass_transform + +from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators +from .config import BaseConfig, Extra, inherit_config, prepare_config +from .error_wrappers import ErrorWrapper, ValidationError +from .errors import ConfigError, DictError, ExtraError, MissingError +from .fields import ( + MAPPING_LIKE_SHAPES, + Field, + ModelField, + ModelPrivateAttr, + PrivateAttr, + Undefined, + is_finalvar_with_default_val, +) +from .json import custom_pydantic_encoder, pydantic_encoder +from .parse import Protocol, load_file, load_str_bytes +from .schema import default_ref_template, model_schema +from .types import PyObject, StrBytes +from .typing import ( + AnyCallable, + get_args, + get_origin, + is_classvar, + is_namedtuple, + is_union, + resolve_annotations, + update_model_forward_refs, +) +from .utils import ( + DUNDER_ATTRIBUTES, + ROOT_KEY, + ClassAttribute, + GetterDict, + Representation, + ValueItems, + generate_model_signature, + is_valid_field, + is_valid_private_name, + lenient_issubclass, + sequence_like, + smart_deepcopy, + unique_list, + validate_field_name, +) + +if TYPE_CHECKING: + from inspect import Signature + + from .class_validators import ValidatorListDict + from .types import ModelOrDc + from .typing import ( + AbstractSetIntStr, + AnyClassMethod, + CallableGenerator, + DictAny, + DictStrAny, + MappingIntStrAny, + ReprArgs, + SetStr, + TupleGenerator, + ) + + Model = TypeVar("Model", bound="BaseModel") + +__all__ = "BaseModel", "create_model", "validate_model" + +_T = TypeVar("_T") + + +def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: + if len(fields) > 1: + raise ValueError(f"{ROOT_KEY} cannot be mixed with other fields") + + +def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: + def hash_function(self_: Any) -> int: + return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) + + return hash_function if frozen else None + + +# If a field is of type `Callable`, its default value should be a function and cannot to ignored. +ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod) +# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model +UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES +# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra +# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's +# safe to refer to it. 
If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for +# the `BaseModel` class, since that's defined immediately after the metaclass. +_is_base_model_class_defined = False + + +@dataclass_transform(kw_only_default=True, field_specifiers=(Field,)) +class ModelMetaclass(ABCMeta): + @no_type_check # noqa C901 + def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 + fields: Dict[str, ModelField] = {} + config = BaseConfig + validators: "ValidatorListDict" = {} + + pre_root_validators, post_root_validators = [], [] + private_attributes: Dict[str, ModelPrivateAttr] = {} + base_private_attributes: Dict[str, ModelPrivateAttr] = {} + slots: SetStr = namespace.get("__slots__", ()) + slots = {slots} if isinstance(slots, str) else set(slots) + class_vars: SetStr = set() + hash_func: Optional[Callable[[Any], int]] = None + + for base in reversed(bases): + if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: + fields.update(smart_deepcopy(base.__fields__)) + config = inherit_config(base.__config__, config) + validators = inherit_validators(base.__validators__, validators) + pre_root_validators += base.__pre_root_validators__ + post_root_validators += base.__post_root_validators__ + base_private_attributes.update(base.__private_attributes__) + class_vars.update(base.__class_vars__) + hash_func = base.__hash__ + + resolve_forward_refs = kwargs.pop("__resolve_forward_refs__", True) + allowed_config_kwargs: SetStr = { + key + for key in dir(config) + if not (key.startswith("__") and key.endswith("__")) # skip dunder methods and attributes + } + config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} + config_from_namespace = namespace.get("Config") + if config_kwargs and config_from_namespace: + raise TypeError("Specifying config in two places is ambiguous, use either Config attribute or class kwargs") + config = inherit_config(config_from_namespace, config, **config_kwargs) + + validators = inherit_validators(extract_validators(namespace), validators) + vg = ValidatorGroup(validators) + + for f in fields.values(): + f.set_config(config) + extra_validators = vg.get_validators(f.name) + if extra_validators: + f.class_validators.update(extra_validators) + # re-run prepare to add extra validators + f.populate_validators() + + prepare_config(config, name) + + untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES + + def is_untouched(v: Any) -> bool: + return isinstance(v, untouched_types) or v.__class__.__name__ == "cython_function_or_method" + + if (namespace.get("__module__"), namespace.get("__qualname__")) != ("pydantic.main", "BaseModel"): + annotations = resolve_annotations(namespace.get("__annotations__", {}), namespace.get("__module__", None)) + # annotation only fields need to come first in fields + for ann_name, ann_type in annotations.items(): + if is_classvar(ann_type): + class_vars.add(ann_name) + elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): + class_vars.add(ann_name) + elif is_valid_field(ann_name): + validate_field_name(bases, ann_name) + value = namespace.get(ann_name, Undefined) + allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) + if ( + is_untouched(value) + and ann_type != PyObject + and not any( + lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types + ) + ): + continue + fields[ann_name] = ModelField.infer( + name=ann_name, + value=value, + annotation=ann_type, + 
class_validators=vg.get_validators(ann_name), + config=config, + ) + elif ann_name not in namespace and config.underscore_attrs_are_private: + private_attributes[ann_name] = PrivateAttr() + + untouched_types = UNTOUCHED_TYPES + config.keep_untouched + for var_name, value in namespace.items(): + can_be_changed = var_name not in class_vars and not is_untouched(value) + if isinstance(value, ModelPrivateAttr): + if not is_valid_private_name(var_name): + raise NameError( + f'Private attributes "{var_name}" must not be a valid field name; ' + f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"' + ) + private_attributes[var_name] = value + elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: + private_attributes[var_name] = PrivateAttr(default=value) + elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: + validate_field_name(bases, var_name) + inferred = ModelField.infer( + name=var_name, + value=value, + annotation=annotations.get(var_name, Undefined), + class_validators=vg.get_validators(var_name), + config=config, + ) + if var_name in fields: + if lenient_issubclass(inferred.type_, fields[var_name].type_): + inferred.type_ = fields[var_name].type_ + else: + raise TypeError( + f"The type of {name}.{var_name} differs from the new default value; " + f"if you wish to change the type of this field, please use a type annotation" + ) + fields[var_name] = inferred + + _custom_root_type = ROOT_KEY in fields + if _custom_root_type: + validate_custom_root_type(fields) + vg.check_for_unused() + if config.json_encoders: + json_encoder = partial(custom_pydantic_encoder, config.json_encoders) + else: + json_encoder = pydantic_encoder + pre_rv_new, post_rv_new = extract_root_validators(namespace) + + if hash_func is None: + hash_func = generate_hash_function(config.frozen) + + exclude_from_namespace = fields | private_attributes.keys() | {"__slots__"} + new_namespace = { + "__config__": config, + "__fields__": fields, + "__exclude_fields__": { + name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None + } + or None, + "__include_fields__": { + name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None + } + or None, + "__validators__": vg.validators, + "__pre_root_validators__": unique_list( + pre_root_validators + pre_rv_new, + name_factory=lambda v: v.__name__, + ), + "__post_root_validators__": unique_list( + post_root_validators + post_rv_new, + name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__, + ), + "__schema_cache__": {}, + "__json_encoder__": staticmethod(json_encoder), + "__custom_root_type__": _custom_root_type, + "__private_attributes__": {**base_private_attributes, **private_attributes}, + "__slots__": slots | private_attributes.keys(), + "__hash__": hash_func, + "__class_vars__": class_vars, + **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, + } + + cls = super().__new__(mcs, name, bases, new_namespace, **kwargs) + # set __signature__ attr only for model class, but not for its instances + cls.__signature__ = ClassAttribute("__signature__", generate_model_signature(cls.__init__, fields, config)) + if resolve_forward_refs: + cls.__try_update_forward_refs__() + + # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 + # for attributes not in `new_namespace` (e.g. 
private attributes) + for name, obj in namespace.items(): + if name not in new_namespace: + set_name = getattr(obj, "__set_name__", None) + if callable(set_name): + set_name(cls, name) + + return cls + + def __instancecheck__(self, instance: Any) -> bool: + """ + Avoid calling ABC _abc_subclasscheck unless we're pretty sure. + + See #3829 and python/cpython#92810 + """ + return hasattr(instance, "__fields__") and super().__instancecheck__(instance) + + +object_setattr = object.__setattr__ + + +class BaseModel(Representation, metaclass=ModelMetaclass): + if TYPE_CHECKING: + # populated by the metaclass, defined here to help IDEs only + __fields__: ClassVar[Dict[str, ModelField]] = {} + __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __validators__: ClassVar[Dict[str, AnyCallable]] = {} + __pre_root_validators__: ClassVar[List[AnyCallable]] + __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] + __config__: ClassVar[Type[BaseConfig]] = BaseConfig + __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x + __schema_cache__: ClassVar["DictAny"] = {} + __custom_root_type__: ClassVar[bool] = False + __signature__: ClassVar["Signature"] + __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] + __class_vars__: ClassVar[SetStr] + __fields_set__: ClassVar[SetStr] = set() + + Config = BaseConfig + __slots__ = ("__dict__", "__fields_set__") + __doc__ = "" # Null out the Representation docstring + + def __init__(__pydantic_self__, **data: Any) -> None: + """ + Create a new model by parsing and validating input data from keyword arguments. + + Raises ValidationError if the input data cannot be parsed to form a valid model. + """ + # Uses something other than `self` the first arg to allow "self" as a settable attribute + values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) + if validation_error: + raise validation_error + try: + object_setattr(__pydantic_self__, "__dict__", values) + except TypeError as e: + raise TypeError( + "Model values must be a dict; you may not have returned a dictionary from a root validator" + ) from e + object_setattr(__pydantic_self__, "__fields_set__", fields_set) + __pydantic_self__._init_private_attributes() + + @no_type_check + def __setattr__(self, name, value): # noqa: C901 (ignore complexity) + if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: + return object_setattr(self, name, value) + + if self.__config__.extra is not Extra.allow and name not in self.__fields__: + raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') + elif not self.__config__.allow_mutation or self.__config__.frozen: + raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') + elif name in self.__fields__ and self.__fields__[name].final: + raise TypeError( + f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' + ) + elif self.__config__.validate_assignment: + new_values = {**self.__dict__, name: value} + + for validator in self.__pre_root_validators__: + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) + + known_field = self.__fields__.get(name, None) + if known_field: + # We want to + # - make sure validators are called without the current value for this field inside `values` + # - keep 
other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts) + # - keep the order of the fields + if not known_field.field_info.allow_mutation: + raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') + dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} + value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) + else: + new_values[name] = value + + errors = [] + for skip_on_failure, validator in self.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + if errors: + raise ValidationError(errors, self.__class__) + + # update the whole __dict__ as other values than just `value` + # may be changed (e.g. with `root_validator`) + object_setattr(self, "__dict__", new_values) + else: + self.__dict__[name] = value + + self.__fields_set__.add(name) + + def __getstate__(self) -> "DictAny": + private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) + return { + "__dict__": self.__dict__, + "__fields_set__": self.__fields_set__, + "__private_attribute_values__": {k: v for k, v in private_attrs if v is not Undefined}, + } + + def __setstate__(self, state: "DictAny") -> None: + object_setattr(self, "__dict__", state["__dict__"]) + object_setattr(self, "__fields_set__", state["__fields_set__"]) + for name, value in state.get("__private_attribute_values__", {}).items(): + object_setattr(self, name, value) + + def _init_private_attributes(self) -> None: + for name, private_attr in self.__private_attributes__.items(): + default = private_attr.get_default() + if default is not Undefined: + object_setattr(self, name, default) + + def dict( + self, + *, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> "DictStrAny": + """ + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + + return dict( + self._iter( + to_dict=True, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + + def json( + self, + *, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + encoder: Optional[Callable[[Any], Any]] = None, + models_as_dict: bool = True, + **dumps_kwargs: Any, + ) -> str: + """ + Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. + + `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. 
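+
+        Editor's note, a minimal illustration (the ``Item`` model here is
+        hypothetical, not part of this module)::
+
+            >>> class Item(BaseModel):
+            ...     name: str
+            ...     price: float = 1.0
+            >>> Item(name="pen").json(exclude_defaults=True)
+            '{"name": "pen"}'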
+ """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) + + # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` + # because we want to be able to keep raw `BaseModel` instances and not as `dict`. + # This allows users to write custom JSON encoders for given `BaseModel` classes. + data = dict( + self._iter( + to_dict=models_as_dict, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + if self.__custom_root_type__: + data = data[ROOT_KEY] + return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) + + @classmethod + def _enforce_dict_if_root(cls, obj: Any) -> Any: + if cls.__custom_root_type__ and ( + not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) + and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY}) + or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES + ): + return {ROOT_KEY: obj} + else: + return obj + + @classmethod + def parse_obj(cls: Type["Model"], obj: Any) -> "Model": + obj = cls._enforce_dict_if_root(obj) + if not isinstance(obj, dict): + try: + obj = dict(obj) + except (TypeError, ValueError) as e: + exc = TypeError(f"{cls.__name__} expected dict not {obj.__class__.__name__}") + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e + return cls(**obj) + + @classmethod + def parse_raw( + cls: Type["Model"], + b: StrBytes, + *, + content_type: str = None, + encoding: str = "utf8", + proto: Protocol = None, + allow_pickle: bool = False, + ) -> "Model": + try: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + except (ValueError, TypeError, UnicodeDecodeError) as e: + raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) + return cls.parse_obj(obj) + + @classmethod + def parse_file( + cls: Type["Model"], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = "utf8", + proto: Protocol = None, + allow_pickle: bool = False, + ) -> "Model": + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + return cls.parse_obj(obj) + + @classmethod + def from_orm(cls: Type["Model"], obj: Any) -> "Model": + if not cls.__config__.orm_mode: + raise ConfigError("You must have the config attribute orm_mode=True to use from_orm") + obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) + m = cls.__new__(cls) + values, fields_set, validation_error = validate_model(cls, obj) + if validation_error: + raise validation_error + object_setattr(m, "__dict__", values) + object_setattr(m, "__fields_set__", fields_set) + m._init_private_attributes() + return m + + @classmethod + def construct(cls: Type["Model"], _fields_set: Optional["SetStr"] = None, **values: Any) -> "Model": + """ + Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. + Default values are respected, but no other validation is performed. 
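+        Editor's note, a sketch (the ``User`` model is hypothetical)::
+
+            >>> class User(BaseModel):
+            ...     id: int
+            ...     name: str = "Jane"
+            >>> u = User.construct(id="not-an-int")  # no validation is performed
+            >>> (u.id, u.name)
+            ('not-an-int', 'Jane')
+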
+ Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + """ + m = cls.__new__(cls) + fields_values: Dict[str, Any] = {} + for name, field in cls.__fields__.items(): + if field.alt_alias and field.alias in values: + fields_values[name] = values[field.alias] + elif name in values: + fields_values[name] = values[name] + elif not field.required: + fields_values[name] = field.get_default() + fields_values.update(values) + object_setattr(m, "__dict__", fields_values) + if _fields_set is None: + _fields_set = set(values.keys()) + object_setattr(m, "__fields_set__", _fields_set) + m._init_private_attributes() + return m + + def _copy_and_set_values(self: "Model", values: "DictStrAny", fields_set: "SetStr", *, deep: bool) -> "Model": + if deep: + # chances of having empty dict here are quite low for using smart_deepcopy + values = deepcopy(values) + + cls = self.__class__ + m = cls.__new__(cls) + object_setattr(m, "__dict__", values) + object_setattr(m, "__fields_set__", fields_set) + for name in self.__private_attributes__: + value = getattr(self, name, Undefined) + if value is not Undefined: + if deep: + value = deepcopy(value) + object_setattr(m, name, value) + + return m + + def copy( + self: "Model", + *, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + update: Optional["DictStrAny"] = None, + deep: bool = False, + ) -> "Model": + """ + Duplicate a model, optionally choose which fields to include, exclude and change. + + :param include: fields to include in new model + :param exclude: fields to exclude from new model, as with values this takes precedence over include + :param update: values to change/add in the new model. Note: the data is not validated before creating + the new model: you should trust this data + :param deep: set to `True` to make a deep copy of the model + :return: new model instance + """ + + values = dict( + self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), + **(update or {}), + ) + + # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg + if update: + fields_set = self.__fields_set__ | update.keys() + else: + fields_set = set(self.__fields_set__) + + return self._copy_and_set_values(values, fields_set, deep=deep) + + @classmethod + def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> "DictStrAny": + cached = cls.__schema_cache__.get((by_alias, ref_template)) + if cached is not None: + return cached + s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) + cls.__schema_cache__[(by_alias, ref_template)] = s + return s + + @classmethod + def schema_json( + cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any + ) -> str: + from .json import pydantic_encoder + + return cls.__config__.json_dumps( + cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs + ) + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls: Type["Model"], value: Any) -> "Model": + if isinstance(value, cls): + copy_on_model_validation = cls.__config__.copy_on_model_validation + # whether to deep or shallow copy the model on validation, None means do not copy + deep_copy: Optional[bool] = None + if copy_on_model_validation not in {"deep", "shallow", "none"}: + # Warn about deprecated behavior + 
warnings.warn( + "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning + ) + if copy_on_model_validation: + deep_copy = False + + if copy_on_model_validation == "shallow": + # shallow copy + deep_copy = False + elif copy_on_model_validation == "deep": + # deep copy + deep_copy = True + + if deep_copy is None: + return value + else: + return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) + + value = cls._enforce_dict_if_root(value) + + if isinstance(value, dict): + return cls(**value) + elif cls.__config__.orm_mode: + return cls.from_orm(value) + else: + try: + value_as_dict = dict(value) + except (TypeError, ValueError) as e: + raise DictError() from e + return cls(**value_as_dict) + + @classmethod + def _decompose_class(cls: Type["Model"], obj: Any) -> GetterDict: + if isinstance(obj, GetterDict): + return obj + return cls.__config__.getter_dict(obj) + + @classmethod + @no_type_check + def _get_value( + cls, + v: Any, + to_dict: bool, + by_alias: bool, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]], + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]], + exclude_unset: bool, + exclude_defaults: bool, + exclude_none: bool, + ) -> Any: + if isinstance(v, BaseModel): + if to_dict: + v_dict = v.dict( + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=include, + exclude=exclude, + exclude_none=exclude_none, + ) + if ROOT_KEY in v_dict: + return v_dict[ROOT_KEY] + return v_dict + else: + return v.copy(include=include, exclude=exclude) + + value_exclude = ValueItems(v, exclude) if exclude else None + value_include = ValueItems(v, include) if include else None + + if isinstance(v, dict): + return { + k_: cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(k_), + exclude=value_exclude and value_exclude.for_element(k_), + exclude_none=exclude_none, + ) + for k_, v_ in v.items() + if (not value_exclude or not value_exclude.is_excluded(k_)) + and (not value_include or value_include.is_included(k_)) + } + + elif sequence_like(v): + seq_args = ( + cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(i), + exclude=value_exclude and value_exclude.for_element(i), + exclude_none=exclude_none, + ) + for i, v_ in enumerate(v) + if (not value_exclude or not value_exclude.is_excluded(i)) + and (not value_include or value_include.is_included(i)) + ) + + return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) + + elif isinstance(v, Enum) and getattr(cls.Config, "use_enum_values", False): + return v.value + + else: + return v + + @classmethod + def __try_update_forward_refs__(cls, **localns: Any) -> None: + """ + Same as update_forward_refs but will not raise exception + when forward references are not defined. + """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this Model, globalns and localns. 
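+
+        Editor's note, an illustration (hypothetical self-referencing ``Node``)::
+
+            >>> class Node(BaseModel):
+            ...     value: int
+            ...     child: Optional["Node"] = None
+            >>> Node.update_forward_refs()
+            >>> Node(value=1, child={"value": 2}).child.value
+            2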
+ """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) + + def __iter__(self) -> "TupleGenerator": + """ + so `dict(model)` works + """ + yield from self.__dict__.items() + + def _iter( + self, + to_dict: bool = False, + by_alias: bool = False, + include: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude: Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> "TupleGenerator": + # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. + # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. + if exclude is not None or self.__exclude_fields__ is not None: + exclude = ValueItems.merge(self.__exclude_fields__, exclude) + + if include is not None or self.__include_fields__ is not None: + include = ValueItems.merge(self.__include_fields__, include, intersect=True) + + allowed_keys = self._calculate_keys( + include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore + ) + if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): + # huge boost for plain _iter() + yield from self.__dict__.items() + return + + value_exclude = ValueItems(self, exclude) if exclude is not None else None + value_include = ValueItems(self, include) if include is not None else None + + for field_key, v in self.__dict__.items(): + if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): + continue + + if exclude_defaults: + model_field = self.__fields__.get(field_key) + if not getattr(model_field, "required", True) and getattr(model_field, "default", _missing) == v: + continue + + if by_alias and field_key in self.__fields__: + dict_key = self.__fields__[field_key].alias + else: + dict_key = field_key + + if to_dict or value_include or value_exclude: + v = self._get_value( + v, + to_dict=to_dict, + by_alias=by_alias, + include=value_include and value_include.for_element(field_key), + exclude=value_exclude and value_exclude.for_element(field_key), + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + yield dict_key, v + + def _calculate_keys( + self, + include: Optional["MappingIntStrAny"], + exclude: Optional["MappingIntStrAny"], + exclude_unset: bool, + update: Optional["DictStrAny"] = None, + ) -> Optional[AbstractSet[str]]: + if include is None and exclude is None and exclude_unset is False: + return None + + keys: AbstractSet[str] + if exclude_unset: + keys = self.__fields_set__.copy() + else: + keys = self.__dict__.keys() + + if include is not None: + keys &= include.keys() + + if update: + keys -= update.keys() + + if exclude: + keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} + + return keys + + def __eq__(self, other: Any) -> bool: + if isinstance(other, BaseModel): + return self.dict() == other.dict() + else: + return self.dict() == other + + def __repr_args__(self) -> "ReprArgs": + return [ + (k, v) + for k, v in self.__dict__.items() + if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) + ] + + +_is_base_model_class_defined = True + + +@overload +def create_model( + __model_name: str, + *, + __config__: Optional[Type[BaseConfig]] = None, + __base__: None = None, + __module__: str = __name__, + __validators__: 
Dict[str, "AnyClassMethod"] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type["BaseModel"]:
+    ...
+
+
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[Type["Model"], Tuple[Type["Model"], ...]],
+    __module__: str = __name__,
+    __validators__: Dict[str, "AnyClassMethod"] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type["Model"]:
+    ...
+
+
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[None, Type["Model"], Tuple[Type["Model"], ...]] = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, "AnyClassMethod"] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    __slots__: Optional[Tuple[str, ...]] = None,
+    **field_definitions: Any,
+) -> Type["Model"]:
+    """
+    Dynamically create a model.
+    :param __model_name: name of the created model
+    :param __config__: config class to use for the new model
+    :param __base__: base class for the new model to inherit from
+    :param __module__: module of the created model
+    :param __validators__: a dict of method names and @validator class methods
+    :param __cls_kwargs__: a dict for class creation
+    :param __slots__: Deprecated, `__slots__` should not be passed to `create_model`
+    :param field_definitions: fields of the model (or extra fields if a base is supplied)
+        in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g.
+        `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
+        `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g.
+        `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
+        `foo=(str, FieldInfo(title='Foo'))`
+    """
+    if __slots__ is not None:
+        # __slots__ will be ignored from here on
+        warnings.warn("__slots__ should not be passed to create_model", RuntimeWarning)
+
+    if __base__ is not None:
+        if __config__ is not None:
+            raise ConfigError("to avoid confusion __config__ and __base__ cannot be used together")
+        if not isinstance(__base__, tuple):
+            __base__ = (__base__,)
+    else:
+        __base__ = (cast(Type["Model"], BaseModel),)
+
+    __cls_kwargs__ = __cls_kwargs__ or {}
+
+    fields = {}
+    annotations = {}
+
+    for f_name, f_def in field_definitions.items():
+        if not is_valid_field(f_name):
+            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+        if isinstance(f_def, tuple):
+            try:
+                f_annotation, f_value = f_def
+            except ValueError as e:
+                raise ConfigError(
+                    "field definitions should either be a tuple of (<type>, <default>) or just a "
+                    "default value, unfortunately this means tuples as "
+                    "default values are not allowed"
+                ) from e
+        else:
+            f_annotation, f_value = None, f_def
+
+        if f_annotation:
+            annotations[f_name] = f_annotation
+        fields[f_name] = f_value
+
+    namespace: "DictStrAny" = {"__annotations__": annotations, "__module__": __module__}
+    if __validators__:
+        namespace.update(__validators__)
+    namespace.update(fields)
+    if __config__:
+        namespace["Config"] = inherit_config(__config__, BaseConfig)
+    resolved_bases = resolve_bases(__base__)
+    meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
+    if resolved_bases is not __base__:
+        ns["__orig_bases__"] = __base__
+    namespace.update(ns)
+    return meta(__model_name, resolved_bases, namespace, **kwds)
+
+
+_missing = object()
+
+
+def validate_model(  # noqa: C901 (ignore complexity)
+    model: Type[BaseModel], input_data: "DictStrAny", cls: "ModelOrDc" = None
+) -> Tuple["DictStrAny", "SetStr", Optional[ValidationError]]:
+    """
+    
validate data against a model. + """ + values = {} + errors = [] + # input_data names, possibly alias + names_used = set() + # field names, never aliases + fields_set = set() + config = model.__config__ + check_extra = config.extra is not Extra.ignore + cls_ = cls or model + + for validator in model.__pre_root_validators__: + try: + input_data = validator(cls_, input_data) + except (ValueError, TypeError, AssertionError) as exc: + return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) + + for name, field in model.__fields__.items(): + value = input_data.get(field.alias, _missing) + using_name = False + if value is _missing and config.allow_population_by_field_name and field.alt_alias: + value = input_data.get(field.name, _missing) + using_name = True + + if value is _missing: + if field.required: + errors.append(ErrorWrapper(MissingError(), loc=field.alias)) + continue + + value = field.get_default() + + if not config.validate_all and not field.validate_always: + values[name] = value + continue + else: + fields_set.add(name) + if check_extra: + names_used.add(field.name if using_name else field.alias) + + v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[name] = v_ + + if check_extra: + if isinstance(input_data, GetterDict): + extra = input_data.extra_keys() - names_used + else: + extra = input_data.keys() - names_used + if extra: + fields_set |= extra + if config.extra is Extra.allow: + for f in extra: + values[f] = input_data[f] + else: + for f in sorted(extra): + errors.append(ErrorWrapper(ExtraError(), loc=f)) + + for skip_on_failure, validator in model.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + values = validator(cls_, values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + + if errors: + return values, fields_set, ValidationError(errors, cls_) + else: + return values, fields_set, None diff --git a/vendor/pydantic/networks.py b/vendor/pydantic/networks.py new file mode 100644 index 0000000..d83ffd4 --- /dev/null +++ b/vendor/pydantic/networks.py @@ -0,0 +1,747 @@ +import re +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, + _BaseAddress, + _BaseNetwork, +) +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + Generator, + List, + Match, + Optional, + Pattern, + Set, + Tuple, + Type, + Union, + cast, + no_type_check, +) + +from . 
import errors
+from .utils import Representation, update_not_none
+from .validators import constr_length_validator, str_validator
+
+if TYPE_CHECKING:
+    import email_validator
+    from typing_extensions import TypedDict
+
+    from .config import BaseConfig
+    from .fields import ModelField
+    from .typing import AnyCallable
+
+    CallableGenerator = Generator[AnyCallable, None, None]
+
+    class Parts(TypedDict, total=False):
+        scheme: str
+        user: Optional[str]
+        password: Optional[str]
+        ipv4: Optional[str]
+        ipv6: Optional[str]
+        domain: Optional[str]
+        port: Optional[str]
+        path: Optional[str]
+        query: Optional[str]
+        fragment: Optional[str]
+
+    class HostParts(TypedDict, total=False):
+        host: str
+        tld: Optional[str]
+        host_type: Optional[str]
+        port: Optional[str]
+        rebuild: bool
+
+else:
+    email_validator = None
+
+    class Parts(dict):
+        pass
+
+
+NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]]
+
+__all__ = [
+    "AnyUrl",
+    "AnyHttpUrl",
+    "FileUrl",
+    "HttpUrl",
+    "stricturl",
+    "EmailStr",
+    "NameEmail",
+    "IPvAnyAddress",
+    "IPvAnyInterface",
+    "IPvAnyNetwork",
+    "PostgresDsn",
+    "CockroachDsn",
+    "AmqpDsn",
+    "RedisDsn",
+    "MongoDsn",
+    "KafkaDsn",
+    "validate_email",
+]
+
+_url_regex_cache = None
+_multi_host_url_regex_cache = None
+_ascii_domain_regex_cache = None
+_int_domain_regex_cache = None
+_host_regex_cache = None
+
+_host_regex = (
+    r"(?:"
+    r"(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|"  # ipv4
+    r"(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|"  # ipv6
+    r"(?P<domain>[^\s/:?#]+)"  # domain, validation occurs later
+    r")?"
+    r"(?::(?P<port>\d+))?"  # port
+)
+_scheme_regex = r"(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?"  # scheme https://tools.ietf.org/html/rfc3986#appendix-A
+_user_info_regex = r"(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?"
+_path_regex = r"(?P<path>/[^\s?#]*)?"
+_query_regex = r"(?:\?(?P<query>[^\s#]*))?"
+_fragment_regex = r"(?:#(?P<fragment>[^\s#]*))?"
+
+
+def url_regex() -> Pattern[str]:
+    global _url_regex_cache
+    if _url_regex_cache is None:
+        _url_regex_cache = re.compile(
+            rf"{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}",
+            re.IGNORECASE,
+        )
+    return _url_regex_cache
+
+
+def multi_host_url_regex() -> Pattern[str]:
+    """
+    Compiled multi host url regex.
+
+    In addition to `url_regex`, it allows matching multiple hosts,
+    e.g. host1.db.net,host2.db.net
+    """
+    global _multi_host_url_regex_cache
+    if _multi_host_url_regex_cache is None:
+        _multi_host_url_regex_cache = re.compile(
+            rf"{_scheme_regex}{_user_info_regex}"
+            r"(?P<hosts>([^/]*))"  # validation occurs later
+            rf"{_path_regex}{_query_regex}{_fragment_regex}",
+            re.IGNORECASE,
+        )
+    return _multi_host_url_regex_cache
+
+
+def ascii_domain_regex() -> Pattern[str]:
+    global _ascii_domain_regex_cache
+    if _ascii_domain_regex_cache is None:
+        ascii_chunk = r"[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?"
+        ascii_domain_ending = r"(?P<tld>\.[a-z]{2,63})?\.?"
+        _ascii_domain_regex_cache = re.compile(
+            rf"(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}", re.IGNORECASE
+        )
+    return _ascii_domain_regex_cache
+
+
+def int_domain_regex() -> Pattern[str]:
+    global _int_domain_regex_cache
+    if _int_domain_regex_cache is None:
+        int_chunk = r"[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?"
+        int_domain_ending = r"(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?"
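+        # Editor's note (illustration, not upstream code): unlike ascii_domain_regex(),
+        # this pattern accepts internationalized labels, e.g. it fully matches
+        # "bücher.de" and captures tld=".de".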
+ _int_domain_regex_cache = re.compile(rf"(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}", re.IGNORECASE) + return _int_domain_regex_cache + + +def host_regex() -> Pattern[str]: + global _host_regex_cache + if _host_regex_cache is None: + _host_regex_cache = re.compile( + _host_regex, + re.IGNORECASE, + ) + return _host_regex_cache + + +class AnyUrl(str): + strip_whitespace = True + min_length = 1 + max_length = 2**16 + allowed_schemes: Optional[Collection[str]] = None + tld_required: bool = False + user_required: bool = False + host_required: bool = True + hidden_parts: Set[str] = set() + + __slots__ = ("scheme", "user", "password", "host", "tld", "host_type", "port", "path", "query", "fragment") + + @no_type_check + def __new__(cls, url: Optional[str], **kwargs) -> object: + return str.__new__(cls, cls.build(**kwargs) if url is None else url) + + def __init__( + self, + url: str, + *, + scheme: str, + user: Optional[str] = None, + password: Optional[str] = None, + host: Optional[str] = None, + tld: Optional[str] = None, + host_type: str = "domain", + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + ) -> None: + str.__init__(url) + self.scheme = scheme + self.user = user + self.password = password + self.host = host + self.tld = tld + self.host_type = host_type + self.port = port + self.path = path + self.query = query + self.fragment = fragment + + @classmethod + def build( + cls, + *, + scheme: str, + user: Optional[str] = None, + password: Optional[str] = None, + host: str, + port: Optional[str] = None, + path: Optional[str] = None, + query: Optional[str] = None, + fragment: Optional[str] = None, + **_kwargs: str, + ) -> str: + parts = Parts( + scheme=scheme, + user=user, + password=password, + host=host, + port=port, + path=path, + query=query, + fragment=fragment, + **_kwargs, # type: ignore[misc] + ) + + url = scheme + "://" + if user: + url += user + if password: + url += ":" + password + if user or password: + url += "@" + url += host + if port and ("port" not in cls.hidden_parts or cls.get_default_parts(parts).get("port") != port): + url += ":" + port + if path: + url += path + if query: + url += "?" + query + if fragment: + url += "#" + fragment + return url + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format="uri") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, value: Any, field: "ModelField", config: "BaseConfig") -> "AnyUrl": + if value.__class__ == cls: + return value + value = str_validator(value) + if cls.strip_whitespace: + value = value.strip() + url: str = cast(str, constr_length_validator(value, field, config)) + + m = cls._match_url(url) + # the regex should always match, if it doesn't please report with details of the URL tried + assert m, "URL regex failed unexpectedly" + + original_parts = cast("Parts", m.groupdict()) + parts = cls.apply_default_parts(original_parts) + parts = cls.validate_parts(parts) + + if m.end() != len(url): + raise errors.UrlExtraError(extra=url[m.end() :]) + + return cls._build_url(m, url, parts) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: "Parts") -> "AnyUrl": + """ + Validate hosts and build the AnyUrl object. Split from `validate` so this method + can be altered in `MultiHostDsn`. 
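+
+        Editor's illustration: for ``"https://example.com/a"`` the result is a ``str``
+        subclass equal to the input URL, with ``scheme="https"``, ``host="example.com"``,
+        ``host_type="domain"``, ``tld="com"`` and ``path="/a"`` available as attributes.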
+ """ + host, tld, host_type, rebuild = cls.validate_host(parts) + + return cls( + None if rebuild else url, + scheme=parts["scheme"], + user=parts["user"], + password=parts["password"], + host=host, + tld=tld, + host_type=host_type, + port=parts["port"], + path=parts["path"], + query=parts["query"], + fragment=parts["fragment"], + ) + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return url_regex().match(url) + + @staticmethod + def _validate_port(port: Optional[str]) -> None: + if port is not None and int(port) > 65_535: + raise errors.UrlPortError() + + @classmethod + def validate_parts(cls, parts: "Parts", validate_port: bool = True) -> "Parts": + """ + A method used to validate parts of a URL. + Could be overridden to set default values for parts if missing + """ + scheme = parts["scheme"] + if scheme is None: + raise errors.UrlSchemeError() + + if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: + raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) + + if validate_port: + cls._validate_port(parts["port"]) + + user = parts["user"] + if cls.user_required and user is None: + raise errors.UrlUserInfoError() + + return parts + + @classmethod + def validate_host(cls, parts: "Parts") -> Tuple[str, Optional[str], str, bool]: + tld, host_type, rebuild = None, None, False + for f in ("domain", "ipv4", "ipv6"): + host = parts[f] # type: ignore[literal-required] + if host: + host_type = f + break + + if host is None: + if cls.host_required: + raise errors.UrlHostError() + elif host_type == "domain": + is_international = False + d = ascii_domain_regex().fullmatch(host) + if d is None: + d = int_domain_regex().fullmatch(host) + if d is None: + raise errors.UrlHostError() + is_international = True + + tld = d.group("tld") + if tld is None and not is_international: + d = int_domain_regex().fullmatch(host) + assert d is not None + tld = d.group("tld") + is_international = True + + if tld is not None: + tld = tld[1:] + elif cls.tld_required: + raise errors.UrlHostTldError() + + if is_international: + host_type = "int_domain" + rebuild = True + host = host.encode("idna").decode("ascii") + if tld is not None: + tld = tld.encode("idna").decode("ascii") + + return host, tld, host_type, rebuild # type: ignore + + @staticmethod + def get_default_parts(parts: "Parts") -> "Parts": + return {} + + @classmethod + def apply_default_parts(cls, parts: "Parts") -> "Parts": + for key, value in cls.get_default_parts(parts).items(): + if not parts[key]: # type: ignore[literal-required] + parts[key] = value # type: ignore[literal-required] + return parts + + def __repr__(self) -> str: + extra = ", ".join(f"{n}={getattr(self, n)!r}" for n in self.__slots__ if getattr(self, n) is not None) + return f"{self.__class__.__name__}({super().__repr__()}, {extra})" + + +class AnyHttpUrl(AnyUrl): + allowed_schemes = {"http", "https"} + + __slots__ = () + + +class HttpUrl(AnyHttpUrl): + tld_required = True + # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers + max_length = 2083 + hidden_parts = {"port"} + + @staticmethod + def get_default_parts(parts: "Parts") -> "Parts": + return {"port": "80" if parts["scheme"] == "http" else "443"} + + +class FileUrl(AnyUrl): + allowed_schemes = {"file"} + host_required = False + + __slots__ = () + + +class MultiHostDsn(AnyUrl): + __slots__ = AnyUrl.__slots__ + ("hosts",) + + def __init__(self, *args: Any, hosts: Optional[List["HostParts"]] = None, **kwargs: Any): + super().__init__(*args, 
**kwargs) + self.hosts = hosts + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return multi_host_url_regex().match(url) + + @classmethod + def validate_parts(cls, parts: "Parts", validate_port: bool = True) -> "Parts": + return super().validate_parts(parts, validate_port=False) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: "Parts") -> "MultiHostDsn": + hosts_parts: List["HostParts"] = [] + host_re = host_regex() + for host in m.groupdict()["hosts"].split(","): + d: Parts = host_re.match(host).groupdict() # type: ignore + host, tld, host_type, rebuild = cls.validate_host(d) + port = d.get("port") + cls._validate_port(port) + hosts_parts.append( + { + "host": host, + "host_type": host_type, + "tld": tld, + "rebuild": rebuild, + "port": port, + } + ) + + if len(hosts_parts) > 1: + return cls( + None if any([hp["rebuild"] for hp in hosts_parts]) else url, + scheme=parts["scheme"], + user=parts["user"], + password=parts["password"], + path=parts["path"], + query=parts["query"], + fragment=parts["fragment"], + host_type=None, + hosts=hosts_parts, + ) + else: + # backwards compatibility with single host + host_part = hosts_parts[0] + return cls( + None if host_part["rebuild"] else url, + scheme=parts["scheme"], + user=parts["user"], + password=parts["password"], + host=host_part["host"], + tld=host_part["tld"], + host_type=host_part["host_type"], + port=host_part.get("port"), + path=parts["path"], + query=parts["query"], + fragment=parts["fragment"], + ) + + +class PostgresDsn(MultiHostDsn): + allowed_schemes = { + "postgres", + "postgresql", + "postgresql+asyncpg", + "postgresql+pg8000", + "postgresql+psycopg", + "postgresql+psycopg2", + "postgresql+psycopg2cffi", + "postgresql+py-postgresql", + "postgresql+pygresql", + } + user_required = True + + __slots__ = () + + +class CockroachDsn(AnyUrl): + allowed_schemes = { + "cockroachdb", + "cockroachdb+psycopg2", + "cockroachdb+asyncpg", + } + user_required = True + + +class AmqpDsn(AnyUrl): + allowed_schemes = {"amqp", "amqps"} + host_required = False + + +class RedisDsn(AnyUrl): + __slots__ = () + allowed_schemes = {"redis", "rediss"} + host_required = False + + @staticmethod + def get_default_parts(parts: "Parts") -> "Parts": + return { + "domain": "localhost" if not (parts["ipv4"] or parts["ipv6"]) else "", + "port": "6379", + "path": "/0", + } + + +class MongoDsn(AnyUrl): + allowed_schemes = {"mongodb"} + + # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes + @staticmethod + def get_default_parts(parts: "Parts") -> "Parts": + return { + "port": "27017", + } + + +class KafkaDsn(AnyUrl): + allowed_schemes = {"kafka"} + + @staticmethod + def get_default_parts(parts: "Parts") -> "Parts": + return { + "domain": "localhost", + "port": "9092", + } + + +def stricturl( + *, + strip_whitespace: bool = True, + min_length: int = 1, + max_length: int = 2**16, + tld_required: bool = True, + host_required: bool = True, + allowed_schemes: Optional[Collection[str]] = None, +) -> Type[AnyUrl]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + min_length=min_length, + max_length=max_length, + tld_required=tld_required, + host_required=host_required, + allowed_schemes=allowed_schemes, + ) + return type("UrlValue", (AnyUrl,), namespace) + + +def import_email_validator() -> None: + global email_validator + try: + import email_validator + except ImportError as e: + raise 
ImportError("email-validator is not installed, run `pip install pydantic[email]`") from e + + +class EmailStr(str): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="email") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + # included here and below so the error happens straight away + import_email_validator() + + yield str_validator + yield cls.validate + + @classmethod + def validate(cls, value: Union[str]) -> str: + return validate_email(value)[1] + + +class NameEmail(Representation): + __slots__ = "name", "email" + + def __init__(self, name: str, email: str): + self.name = name + self.email = email + + def __eq__(self, other: Any) -> bool: + return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="name-email") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + import_email_validator() + + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> "NameEmail": + if value.__class__ == cls: + return value + value = str_validator(value) + return cls(*validate_email(value)) + + def __str__(self) -> str: + return f"{self.name} <{self.email}>" + + +class IPvAnyAddress(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="ipvanyaddress") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: + try: + return IPv4Address(value) + except ValueError: + pass + + try: + return IPv6Address(value) + except ValueError: + raise errors.IPvAnyAddressError() + + +class IPvAnyInterface(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="ipvanyinterface") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: + try: + return IPv4Interface(value) + except ValueError: + pass + + try: + return IPv6Interface(value) + except ValueError: + raise errors.IPvAnyInterfaceError() + + +class IPvAnyNetwork(_BaseNetwork): # type: ignore + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="string", format="ipvanynetwork") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: + # Assume IP Network is defined with a default value for ``strict`` argument. + # Define your own class if you want to specify network address check strictness. + try: + return IPv4Network(value) + except ValueError: + pass + + try: + return IPv6Network(value) + except ValueError: + raise errors.IPvAnyNetworkError() + + +pretty_email_regex = re.compile(r"([\w ]*?) *<(.*)> *") +MAX_EMAIL_LENGTH = 2048 +"""Maximum length for an email. +A somewhat arbitrary but very generous number compared to what is allowed by most implementations. 
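+Editor's note: the commonly cited practical ceiling derived from RFC 5321 is roughly
+254 characters for a deliverable address, so 2048 is far above anything valid.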
+"""
+
+
+def validate_email(value: Union[str]) -> Tuple[str, str]:
+    """
+    Email address validation using https://pypi.org/project/email-validator/
+    Notes:
+    * raw ip address (literal) domain parts are not allowed.
+    * "John Doe <local_part@domain.com>" style "pretty" email addresses are processed
+    * spaces are stripped from the beginning and end of addresses but no error is raised
+    """
+    if email_validator is None:
+        import_email_validator()
+
+    if len(value) > MAX_EMAIL_LENGTH:
+        raise errors.EmailError()
+
+    m = pretty_email_regex.fullmatch(value)
+    name: Union[str, None] = None
+    if m:
+        name, value = m.groups()
+    email = value.strip()
+    try:
+        parts = email_validator.validate_email(email, check_deliverability=False)
+    except email_validator.EmailNotValidError as e:
+        raise errors.EmailError from e
+
+    if hasattr(parts, "normalized"):
+        # email-validator >= 2
+        email = parts.normalized
+        assert email is not None
+        name = name or parts.local_part
+        return name, email
+    else:
+        # email-validator >1, <2
+        at_index = email.index("@")
+        local_part = email[:at_index]  # RFC 5321, local part must be case-sensitive.
+        global_part = email[at_index:].lower()
+
+        return name or local_part, local_part + global_part
diff --git a/vendor/pydantic/parse.py b/vendor/pydantic/parse.py
new file mode 100644
index 0000000..b32ddc4
--- /dev/null
+++ b/vendor/pydantic/parse.py
@@ -0,0 +1,66 @@
+import json
+import pickle
+from enum import Enum
+from pathlib import Path
+from typing import Any, Callable, Union
+
+from .types import StrBytes
+
+
+class Protocol(str, Enum):
+    json = "json"
+    pickle = "pickle"
+
+
+def load_str_bytes(
+    b: StrBytes,
+    *,
+    content_type: str = None,
+    encoding: str = "utf8",
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    if proto is None and content_type:
+        if content_type.endswith(("json", "javascript")):
+            pass
+        elif allow_pickle and content_type.endswith("pickle"):
+            proto = Protocol.pickle
+        else:
+            raise TypeError(f"Unknown content-type: {content_type}")
+
+    proto = proto or Protocol.json
+
+    if proto == Protocol.json:
+        if isinstance(b, bytes):
+            b = b.decode(encoding)
+        return json_loads(b)
+    elif proto == Protocol.pickle:
+        if not allow_pickle:
+            raise RuntimeError("Trying to decode with pickle with allow_pickle=False")
+        bb = b if isinstance(b, bytes) else b.encode()
+        return pickle.loads(bb)
+    else:
+        raise TypeError(f"Unknown protocol: {proto}")
+
+
+def load_file(
+    path: Union[str, Path],
+    *,
+    content_type: str = None,
+    encoding: str = "utf8",
+    proto: Protocol = None,
+    allow_pickle: bool = False,
+    json_loads: Callable[[str], Any] = json.loads,
+) -> Any:
+    path = Path(path)
+    b = path.read_bytes()
+    if content_type is None:
+        if path.suffix in (".js", ".json"):
+            proto = Protocol.json
+        elif path.suffix == ".pkl":
+            proto = Protocol.pickle
+
+    return load_str_bytes(
+        b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
+    )
diff --git a/vendor/pydantic/schema.py b/vendor/pydantic/schema.py
new file mode 100644
index 0000000..0f3d38f
--- /dev/null
+++ b/vendor/pydantic/schema.py
@@ -0,0 +1,1163 @@
+import re
+import warnings
+from collections import defaultdict
+from dataclasses import is_dataclass
+from datetime import date, datetime, time, timedelta
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from typing import (
TYPE_CHECKING, + Any, + Callable, + Dict, + ForwardRef, + FrozenSet, + Generic, + Iterable, + List, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from uuid import UUID + +from typing_extensions import Annotated, Literal + +from .fields import ( + MAPPING_LIKE_SHAPES, + SHAPE_DEQUE, + SHAPE_FROZENSET, + SHAPE_GENERIC, + SHAPE_ITERABLE, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, +) +from .json import pydantic_encoder +from .networks import AnyUrl, EmailStr +from .types import ( + ConstrainedDecimal, + ConstrainedFloat, + ConstrainedFrozenSet, + ConstrainedInt, + ConstrainedList, + ConstrainedSet, + ConstrainedStr, + SecretBytes, + SecretStr, + StrictBytes, + StrictStr, + conbytes, + condecimal, + confloat, + confrozenset, + conint, + conlist, + conset, + constr, +) +from .typing import ( + all_literal_values, + get_args, + get_origin, + get_sub_types, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_union, +) +from .utils import ROOT_KEY, get_model, lenient_issubclass + +if TYPE_CHECKING: + from .dataclasses import Dataclass + from .main import BaseModel + +default_prefix = "#/definitions/" +default_ref_template = "#/definitions/{model}" + +TypeModelOrEnum = Union[Type["BaseModel"], Type[Enum]] +TypeModelSet = Set[TypeModelOrEnum] + + +def _apply_modify_schema( + modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] +) -> None: + from inspect import signature + + sig = signature(modify_schema) + args = set(sig.parameters.keys()) + if "field" in args or "kwargs" in args: + modify_schema(field_schema, field=field) + else: + modify_schema(field_schema) + + +def schema( + models: Sequence[Union[Type["BaseModel"], Type["Dataclass"]]], + *, + by_alias: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` + top-level JSON key, including their sub-models. + + :param models: a list of models to include in the generated JSON Schema + :param by_alias: generate the schemas using the aliases defined, if any + :param title: title for the generated schema that includes the definitions + :param description: description for the generated schema + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful + for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For + a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for + the models and sub-models passed in ``models``. 
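+
+    Editor's note, a sketch (hypothetical models; assumes ``BaseModel`` is imported)::
+
+        >>> class Bar(BaseModel):
+        ...     name: str
+        >>> class Foo(BaseModel):
+        ...     bar: Bar
+        >>> sorted(schema([Foo])["definitions"])
+        ['Bar', 'Foo']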
+ """ + clean_models = [get_model(model) for model in models] + flat_models = get_flat_models_from_models(clean_models) + model_name_map = get_model_name_map(flat_models) + definitions = {} + output_schema: Dict[str, Any] = {} + if title: + output_schema["title"] = title + if description: + output_schema["description"] = description + for model in clean_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + if definitions: + output_schema["definitions"] = definitions + return output_schema + + +def model_schema( + model: Union[Type["BaseModel"], Type["Dataclass"]], + by_alias: bool = True, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level + JSON key. + + :param model: a Pydantic model (a class that inherits from BaseModel) + :param by_alias: generate the schemas using the aliases defined, if any + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema for the passed ``model`` + """ + model = get_model(model) + flat_models = get_flat_models_from_model(model) + model_name_map = get_model_name_map(flat_models) + model_name = model_name_map[model] + m_schema, m_definitions, nested_models = model_process_schema( + model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template + ) + if model_name in nested_models: + # model_name is in Nested models, it has circular references + m_definitions[model_name] = m_schema + m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) + if m_definitions: + m_schema.update({"definitions": m_definitions}) + return m_schema + + +def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: + # If no title is explicitly set, we don't set title in the schema for enums. + # The behaviour is the same as `BaseModel` reference, where the default title + # is in the definitions part of the schema. 
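+    # Editor's note (illustration): for a non-enum field aliased "created_at" with no
+    # explicit title, the fallback below produces the title "Created At"
+    # ("created_at".title() == "Created_At", then "_" is replaced by " ").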
+ schema_: Dict[str, Any] = {} + if field.field_info.title or not lenient_issubclass(field.type_, Enum): + schema_["title"] = field.field_info.title or field.alias.title().replace("_", " ") + + if field.field_info.title: + schema_overrides = True + + if field.field_info.description: + schema_["description"] = field.field_info.description + schema_overrides = True + + if not field.required and field.default is not None and not is_callable_type(field.outer_type_): + schema_["default"] = encode_default(field.default) + schema_overrides = True + + return schema_, schema_overrides + + +def field_schema( + field: ModelField, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: Optional[TypeModelSet] = None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. + Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field + is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they + will be included in the definitions and referenced in the schema instead of included recursively. + + :param field: a Pydantic ``ModelField`` + :param by_alias: use the defined alias (if any) in the returned schema + :param model_name_map: used to generate the JSON Schema references to other models included in the definitions + :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of + #/definitions/ will be used + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :param known_models: used to solve circular references + :return: tuple of the schema for this field and additional definitions + """ + s, schema_overrides = get_field_info_schema(field) + + validation_schema = get_field_schema_validations(field) + if validation_schema: + s.update(validation_schema) + schema_overrides = True + + f_schema, f_definitions, f_nested_models = field_type_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models or set(), + ) + + # $ref will only be returned when there are no schema_overrides + if "$ref" in f_schema: + return f_schema, f_definitions, f_nested_models + else: + s.update(f_schema) + return s, f_definitions, f_nested_models + + +numeric_types = (int, float, Decimal) +_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ("max_length", numeric_types, "maxLength"), + ("min_length", numeric_types, "minLength"), + ("regex", str, "pattern"), +) + +_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ("gt", numeric_types, "exclusiveMinimum"), + ("lt", numeric_types, "exclusiveMaximum"), + ("ge", numeric_types, "minimum"), + ("le", numeric_types, "maximum"), + ("multiple_of", numeric_types, "multipleOf"), +) + + +def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: + """ + Get the JSON Schema validation keywords for a ``field`` with an annotation of + a Pydantic ``FieldInfo`` with validation arguments. 
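+
+    Editor's illustration: a field declared as ``x: str = Field(..., min_length=2,
+    max_length=5)`` contributes ``{"minLength": 2, "maxLength": 5}`` via the
+    attribute tables above.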
+ """ + f_schema: Dict[str, Any] = {} + + if lenient_issubclass(field.type_, Enum): + # schema is already updated by `enum_process_schema`; just update with field extra + if field.field_info.extra: + f_schema.update(field.field_info.extra) + return f_schema + + if lenient_issubclass(field.type_, (str, bytes)): + for attr_name, t, keyword in _str_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): + for attr_name, t, keyword in _numeric_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if field.field_info is not None and field.field_info.const: + f_schema["const"] = field.default + if field.field_info.extra: + f_schema.update(field.field_info.extra) + modify_schema = getattr(field.outer_type_, "__modify_schema__", None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema + + +def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: + """ + Process a set of models and generate unique names for them to be used as keys in the JSON Schema + definitions. By default the names are the same as the class name. But if two models in different Python + modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be + based on the Python module path for those conflicting models to prevent name collisions. + + :param unique_models: a Python set of models + :return: dict mapping models to names + """ + name_model_map = {} + conflicting_names: Set[str] = set() + for model in unique_models: + model_name = normalize_name(model.__name__) + if model_name in conflicting_names: + model_name = get_long_model_name(model) + name_model_map[model_name] = model + elif model_name in name_model_map: + conflicting_names.add(model_name) + conflicting_model = name_model_map.pop(model_name) + name_model_map[get_long_model_name(conflicting_model)] = conflicting_model + name_model_map[get_long_model_name(model)] = model + else: + name_model_map[model_name] = model + return {v: k for k, v in name_model_map.items()} + + +def get_flat_models_from_model(model: Type["BaseModel"], known_models: Optional[TypeModelSet] = None) -> TypeModelSet: + """ + Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass + model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also + subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), + the return value will be ``set([Foo, Bar, Baz])``. + + :param model: a Pydantic ``BaseModel`` subclass + :param known_models: used to solve circular references + :return: a set with the initial model and all its sub-models + """ + known_models = known_models or set() + flat_models: TypeModelSet = set() + flat_models.add(model) + known_models |= flat_models + fields = cast(Sequence[ModelField], model.__fields__.values()) + flat_models |= get_flat_models_from_fields(fields, known_models=known_models) + return flat_models + + +def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: + """ + Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel + (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. + I.e. 
if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that
+    model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of
+    type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param field: a Pydantic ``ModelField``
+    :param known_models: used to solve circular references
+    :return: a set with the model used in the declaration for this field, if any, and all its sub-models
+    """
+    from .main import BaseModel
+
+    flat_models: TypeModelSet = set()
+
+    field_type = field.type_
+    if lenient_issubclass(getattr(field_type, "__pydantic_model__", None), BaseModel):
+        field_type = field_type.__pydantic_model__
+
+    if field.sub_fields and not lenient_issubclass(field_type, BaseModel):
+        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
+    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
+        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
+    elif lenient_issubclass(field_type, Enum):
+        flat_models.add(field_type)
+    return flat_models
+
+
+def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
+    """
+    Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
+    (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
+    I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a
+    field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also
+    subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param fields: a list of Pydantic ``ModelField``s
+    :param known_models: used to solve circular references
+    :return: a set with any model declared in the fields, and all their sub-models
+    """
+    flat_models: TypeModelSet = set()
+    for field in fields:
+        flat_models |= get_flat_models_from_field(field, known_models=known_models)
+    return flat_models
+
+
+def get_flat_models_from_models(models: Sequence[Type["BaseModel"]]) -> TypeModelSet:
+    """
+    Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
+    a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
+    a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+    """
+    flat_models: TypeModelSet = set()
+    for model in models:
+        flat_models |= get_flat_models_from_model(model)
+    return flat_models
+
+
+def get_long_model_name(model: TypeModelOrEnum) -> str:
+    return f"{model.__module__}__{model.__qualname__}".replace(".", "__")
+
+
+def field_type_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    Used by ``field_schema()``, you probably should be using that function.
+
+    Take a single ``field`` and generate the schema for its type only, not including additional
+    information as title, etc. Also return additional schema definitions, from sub-models.
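+
+    Illustrative note (added, not part of the upstream pydantic source): a field
+    annotated as ``List[int]`` would yield a type schema like
+    ``{"type": "array", "items": {"type": "integer"}}``.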
+ """ + from .main import BaseModel # noqa: F811 + + definitions = {} + nested_models: Set[str] = set() + f_schema: Dict[str, Any] + if field.shape in { + SHAPE_LIST, + SHAPE_TUPLE_ELLIPSIS, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_ITERABLE, + SHAPE_DEQUE, + }: + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + f_schema = {"type": "array", "items": items_schema} + if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: + f_schema["uniqueItems"] = True + + elif field.shape in MAPPING_LIKE_SHAPES: + f_schema = {"type": "object"} + key_field = cast(ModelField, field.key_field) + regex = getattr(key_field.type_, "regex", None) + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if regex: + # Dict keys have a regex pattern + # items_schema might be a schema or empty dict, add it either way + f_schema["patternProperties"] = {ConstrainedStr._get_pattern(regex): items_schema} + if items_schema: + # The dict values are not simply Any, so they need a schema + f_schema["additionalProperties"] = items_schema + elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): + sub_schema = [] + sub_fields = cast(List[ModelField], field.sub_fields) + for sf in sub_fields: + sf_schema, sf_definitions, sf_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sf_definitions) + nested_models.update(sf_nested_models) + sub_schema.append(sf_schema) + + sub_fields_len = len(sub_fields) + if field.shape == SHAPE_GENERIC: + all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {"type": "array", "items": sub_schema} + f_schema = {"allOf": [all_of_schemas]} + else: + f_schema = { + "type": "array", + "minItems": sub_fields_len, + "maxItems": sub_fields_len, + } + if sub_fields_len >= 1: + f_schema["items"] = sub_schema + else: + assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape + f_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + + # check field type to avoid repeated calls to the same __modify_schema__ method + if field.type_ != field.outer_type_: + if field.shape == SHAPE_GENERIC: + field_type = field.type_ + else: + field_type = field.outer_type_ + modify_schema = getattr(field_type, "__modify_schema__", None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema, definitions, nested_models + + +def model_process_schema( + model: TypeModelOrEnum, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: Optional[TypeModelSet] = None, + field: Optional[ModelField] = None, +) 
-> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Used by ``model_schema()``, you probably should be using that function. + + Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The + sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All + the definitions are returned as the second value. + """ + from inspect import getdoc, signature + + known_models = known_models or set() + if lenient_issubclass(model, Enum): + model = cast(Type[Enum], model) + s = enum_process_schema(model, field=field) + return s, {}, set() + model = cast(Type["BaseModel"], model) + s = {"title": model.__config__.title or model.__name__} + doc = getdoc(model) + if doc: + s["description"] = doc + known_models.add(model) + m_schema, m_definitions, nested_models = model_type_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + s.update(m_schema) + schema_extra = model.__config__.schema_extra + if callable(schema_extra): + if len(signature(schema_extra).parameters) == 1: + schema_extra(s) + else: + schema_extra(s, model) + else: + s.update(schema_extra) + return s, m_definitions, nested_models + + +def model_type_schema( + model: Type["BaseModel"], + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + You probably should be using ``model_schema()``, this function is indirectly used by that function. + + Take a single ``model`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. + """ + properties = {} + required = [] + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + for k, f in model.__fields__.items(): + try: + f_schema, f_definitions, f_nested_models = field_schema( + f, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + except SkipField as skip: + warnings.warn(skip.message, UserWarning) + continue + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if by_alias: + properties[f.alias] = f_schema + if f.required: + required.append(f.alias) + else: + properties[k] = f_schema + if f.required: + required.append(k) + if ROOT_KEY in properties: + out_schema = properties[ROOT_KEY] + out_schema["title"] = model.__config__.title or model.__name__ + else: + out_schema = {"type": "object", "properties": properties} + if required: + out_schema["required"] = required + if model.__config__.extra == "forbid": + out_schema["additionalProperties"] = False + return out_schema, definitions, nested_models + + +def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: + """ + Take a single `enum` and generate its schema. + + This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. + """ + import inspect + + schema_: Dict[str, Any] = { + "title": enum.__name__, + # Python assigns all enums a default docstring value of 'An enumeration', so + # all enums will have a description field even if not explicitly provided. 
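+        # Illustrative note (added, not part of the upstream pydantic source): an
+        # enum ``Color`` with a single member ``red = "r"`` and no docstring would
+        # produce ``{"title": "Color", "description": "An enumeration.", "enum": ["r"]}``.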
+ "description": inspect.cleandoc(enum.__doc__ or "An enumeration."), + # Add enum values and the enum field type to the schema. + "enum": [item.value for item in cast(Iterable[Enum], enum)], + } + + add_field_type_to_schema(enum, schema_) + + modify_schema = getattr(enum, "__modify_schema__", None) + if modify_schema: + _apply_modify_schema(modify_schema, field, schema_) + + return schema_ + + +def field_singleton_sub_fields_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you probably should be using that function. + + Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their + schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. + """ + sub_fields = cast(List[ModelField], field.sub_fields) + definitions = {} + nested_models: Set[str] = set() + if len(sub_fields) == 1: + return field_type_schema( + sub_fields[0], + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + else: + s: Dict[str, Any] = {} + # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object + field_has_discriminator: bool = field.discriminator_key is not None + if field_has_discriminator: + assert field.sub_fields_mapping is not None + + discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} + + for discriminator_value, sub_field in field.sub_fields_mapping.items(): + if isinstance(discriminator_value, Enum): + discriminator_value = str(discriminator_value.value) + # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many + if is_union(get_origin(sub_field.type_)): + sub_models = get_sub_types(sub_field.type_) + discriminator_models_refs[discriminator_value] = { + model_name_map[sub_model]: get_schema_ref( + model_name_map[sub_model], ref_prefix, ref_template, False + ) + for sub_model in sub_models + } + else: + sub_field_type = sub_field.type_ + if hasattr(sub_field_type, "__pydantic_model__"): + sub_field_type = sub_field_type.__pydantic_model__ + + discriminator_model_name = model_name_map[sub_field_type] + discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) + discriminator_models_refs[discriminator_value] = discriminator_model_ref["$ref"] + + s["discriminator"] = { + "propertyName": field.discriminator_alias, + "mapping": discriminator_models_refs, + } + + sub_field_schemas = [] + for sf in sub_fields: + sub_schema, sub_definitions, sub_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sub_definitions) + if schema_overrides and "allOf" in sub_schema: + # if the sub_field is a referenced schema we only need the referenced + # object. Otherwise we will end up with several allOf inside anyOf/oneOf. 
+ # See https://github.com/pydantic/pydantic/issues/1209 + sub_schema = sub_schema["allOf"][0] + + if sub_schema.keys() == {"discriminator", "oneOf"}: + # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere + sub_schema.pop("discriminator") + sub_field_schemas.append(sub_schema) + nested_models.update(sub_nested_models) + s["oneOf" if field_has_discriminator else "anyOf"] = sub_field_schemas + return s, definitions, nested_models + + +# Order is important, e.g. subclasses of str must go before str +# this is used only for standard library types, custom types should use __modify_schema__ instead +field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = ( + (Path, {"type": "string", "format": "path"}), + (datetime, {"type": "string", "format": "date-time"}), + (date, {"type": "string", "format": "date"}), + (time, {"type": "string", "format": "time"}), + (timedelta, {"type": "number", "format": "time-delta"}), + (IPv4Network, {"type": "string", "format": "ipv4network"}), + (IPv6Network, {"type": "string", "format": "ipv6network"}), + (IPv4Interface, {"type": "string", "format": "ipv4interface"}), + (IPv6Interface, {"type": "string", "format": "ipv6interface"}), + (IPv4Address, {"type": "string", "format": "ipv4"}), + (IPv6Address, {"type": "string", "format": "ipv6"}), + (Pattern, {"type": "string", "format": "regex"}), + (str, {"type": "string"}), + (bytes, {"type": "string", "format": "binary"}), + (bool, {"type": "boolean"}), + (int, {"type": "integer"}), + (float, {"type": "number"}), + (Decimal, {"type": "number"}), + (UUID, {"type": "string", "format": "uuid"}), + (dict, {"type": "object"}), + (list, {"type": "array", "items": {}}), + (tuple, {"type": "array", "items": {}}), + (set, {"type": "array", "items": {}, "uniqueItems": True}), + (frozenset, {"type": "array", "items": {}, "uniqueItems": True}), +) + +json_scheme = {"type": "string", "format": "json-string"} + + +def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: + """ + Update the given `schema` with the type-specific metadata for the given `field_type`. + + This function looks through `field_class_to_schema` for a class that matches the given `field_type`, + and then modifies the given `schema` with the information from that type. + """ + for type_, t_schema in field_class_to_schema: + # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class + if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: + schema_.update(t_schema) + break + + +def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: + if ref_prefix: + schema_ref = {"$ref": ref_prefix + name} + else: + schema_ref = {"$ref": ref_template.format(model=name)} + return {"allOf": [schema_ref]} if schema_overrides else schema_ref + + +def field_singleton_schema( # noqa: C901 (ignore complexity) + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you should probably be using that function. + + Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
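+
+    Illustrative note (added, not part of the upstream pydantic source): an
+    unconstrained ``int`` field would return ``({"type": "integer"}, {}, set())``.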
+ """ + from .main import BaseModel + + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + field_type = field.type_ + + # Recurse into this field if it contains sub_fields and is NOT a + # BaseModel OR that BaseModel is a const + if field.sub_fields and ( + (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) + ): + return field_singleton_sub_fields_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: + return {}, definitions, nested_models # no restrictions + if is_none_type(field_type): + return {"type": "null"}, definitions, nested_models + if is_callable_type(field_type): + raise SkipField(f"Callable {field.name} was excluded from schema since JSON schema has no equivalent type.") + f_schema: Dict[str, Any] = {} + if field.field_info is not None and field.field_info.const: + f_schema["const"] = field.default + + if is_literal_type(field_type): + values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type)) + + if len({v.__class__ for v in values}) > 1: + return field_schema( + multitypes_literal_field_for_schema(values, field), + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + + # All values have the same type + field_type = values[0].__class__ + f_schema["enum"] = list(values) + add_field_type_to_schema(field_type, f_schema) + elif lenient_issubclass(field_type, Enum): + enum_name = model_name_map[field_type] + f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) + f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) + definitions[enum_name] = enum_process_schema(field_type, field=field) + elif is_namedtuple(field_type): + sub_schema, *_ = model_process_schema( + field_type.__pydantic_model__, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + items_schemas = list(sub_schema["properties"].values()) + f_schema.update( + { + "type": "array", + "items": items_schemas, + "minItems": len(items_schemas), + "maxItems": len(items_schemas), + } + ) + elif not hasattr(field_type, "__pydantic_model__"): + add_field_type_to_schema(field_type, f_schema) + + modify_schema = getattr(field_type, "__modify_schema__", None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + + if f_schema: + return f_schema, definitions, nested_models + + # Handle dataclass-based models + if lenient_issubclass(getattr(field_type, "__pydantic_model__", None), BaseModel): + field_type = field_type.__pydantic_model__ + + if issubclass(field_type, BaseModel): + model_name = model_name_map[field_type] + if field_type not in known_models: + sub_schema, sub_definitions, sub_nested_models = model_process_schema( + field_type, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + definitions.update(sub_definitions) + definitions[model_name] = sub_schema + nested_models.update(sub_nested_models) + else: + nested_models.add(model_name) + schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, 
schema_overrides) + return schema_ref, definitions, nested_models + + # For generics with no args + args = get_args(field_type) + if args is not None and not args and Generic in field_type.__bases__: + return f_schema, definitions, nested_models + + raise ValueError(f"Value not declarable with JSON Schema, field: {field}") + + +def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: + """ + To support `Literal` with values of different types, we split it into multiple `Literal` with same type + e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` + """ + literal_distinct_types = defaultdict(list) + for v in values: + literal_distinct_types[v.__class__].append(v) + distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) + + return ModelField( + name=field.name, + type_=Union[tuple(distinct_literals)], # type: ignore + class_validators=field.class_validators, + model_config=field.model_config, + default=field.default, + required=field.required, + alias=field.alias, + field_info=field.field_info, + ) + + +def encode_default(dft: Any) -> Any: + from .main import BaseModel + + if isinstance(dft, BaseModel) or is_dataclass(dft): + dft = cast("dict[str, Any]", pydantic_encoder(dft)) + + if isinstance(dft, dict): + return {encode_default(k): encode_default(v) for k, v in dft.items()} + elif isinstance(dft, Enum): + return dft.value + elif isinstance(dft, (int, float, str)): + return dft + elif isinstance(dft, (list, tuple)): + t = dft.__class__ + seq_args = (encode_default(v) for v in dft) + return t(*seq_args) if is_namedtuple(t) else t(seq_args) + elif dft is None: + return None + else: + return pydantic_encoder(dft) + + +_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} + + +def get_annotation_from_field_info( + annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False +) -> Type[Any]: + """ + Get an annotation with validation implemented for numbers and strings based on the field_info. + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :param field_name: name of the field for use in error messages + :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment + :return: the same ``annotation`` if unmodified or a new annotation with validation in place + """ + constraints = field_info.get_constraints() + used_constraints: Set[str] = set() + if constraints: + annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) + if validate_assignment: + used_constraints.add("allow_mutation") + + unused_constraints = constraints - used_constraints + if unused_constraints: + raise ValueError( + f'On field "{field_name}" the following field constraints are set but not enforced: ' + f'{", ".join(unused_constraints)}. ' + f"\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints" + ) + + return annotation + + +def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 + """ + Get an annotation with used constraints implemented for numbers and strings based on the field_info. 
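+    Illustrative note (added, not part of the upstream pydantic source): ``str``
+    paired with a ``FieldInfo`` carrying ``max_length=3`` would come back as a
+    generated ``ConstrainedStrValue`` subclass, with the string constraint names
+    reported as used.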
+ + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. + """ + used_constraints: Set[str] = set() + + def go(type_: Any) -> Type[Any]: + if ( + is_literal_type(type_) + or isinstance(type_, ForwardRef) + or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) + ): + return type_ + origin = get_origin(type_) + if origin is not None: + args: Tuple[Any, ...] = get_args(type_) + if any(isinstance(a, ForwardRef) for a in args): + # forward refs cause infinite recursion below + return type_ + + if origin is Annotated: + return go(args[0]) + if is_union(origin): + return Union[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, List) and ( + field_info.min_items is not None + or field_info.max_items is not None + or field_info.unique_items is not None + ): + used_constraints.update({"min_items", "max_items", "unique_items"}) + return conlist( + go(args[0]), + min_items=field_info.min_items, + max_items=field_info.max_items, + unique_items=field_info.unique_items, + ) + + if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({"min_items", "max_items"}) + return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({"min_items", "max_items"}) + return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + for t in (Tuple, List, Set, FrozenSet, Sequence): + if issubclass(origin, t): # type: ignore + return t[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, Dict): + return Dict[args[0], go(args[1])] # type: ignore + + attrs: Optional[Tuple[str, ...]] = None + constraint_func: Optional[Callable[..., type]] = None + if isinstance(type_, type): + if issubclass(type_, (SecretStr, SecretBytes)): + attrs = ("max_length", "min_length") + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): + attrs = ("max_length", "min_length", "regex") + if issubclass(type_, StrictStr): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = constr + elif issubclass(type_, bytes): + attrs = ("max_length", "min_length", "regex") + if issubclass(type_, StrictBytes): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = conbytes + elif issubclass(type_, numeric_types) and not issubclass( + type_, + ( + ConstrainedInt, + ConstrainedFloat, + ConstrainedDecimal, + ConstrainedList, + ConstrainedSet, + ConstrainedFrozenSet, + bool, + ), + ): + # Is numeric type + attrs = ("gt", "lt", "ge", "le", "multiple_of") + if issubclass(type_, float): + attrs += ("allow_inf_nan",) + if issubclass(type_, Decimal): + attrs += ("max_digits", "decimal_places") + numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch + constraint_func = _map_types_constraint[numeric_type] + + if attrs: + used_constraints.update(set(attrs)) + kwargs = { + attr_name: attr + for attr_name, attr 
in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) + if attr is not None + } + if kwargs: + constraint_func = cast(Callable[..., type], constraint_func) + return constraint_func(**kwargs) + return type_ + + return go(annotation), used_constraints + + +def normalize_name(name: str) -> str: + """ + Normalizes the given name. This can be applied to either a model *or* enum. + """ + return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name) + + +class SkipField(Exception): + """ + Utility exception used to exclude fields from schema. + """ + + def __init__(self, message: str) -> None: + self.message = message diff --git a/vendor/pydantic/types.py b/vendor/pydantic/types.py new file mode 100644 index 0000000..7eea0a5 --- /dev/null +++ b/vendor/pydantic/types.py @@ -0,0 +1,1205 @@ +import abc +import math +import re +import warnings +from datetime import date +from decimal import Decimal, InvalidOperation +from enum import Enum +from pathlib import Path +from types import new_class +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + FrozenSet, + List, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from uuid import UUID +from weakref import WeakSet + +from . import errors +from .datetime_parse import parse_date +from .utils import import_string, update_not_none +from .validators import ( + bytes_validator, + constr_length_validator, + constr_lower, + constr_strip_whitespace, + constr_upper, + decimal_validator, + float_finite_validator, + float_validator, + frozenset_validator, + int_validator, + list_validator, + number_multiple_validator, + number_size_validator, + path_exists_validator, + path_validator, + set_validator, + str_validator, + strict_bytes_validator, + strict_float_validator, + strict_int_validator, + strict_str_validator, +) + +__all__ = [ + "NoneStr", + "NoneBytes", + "StrBytes", + "NoneStrBytes", + "StrictStr", + "ConstrainedBytes", + "conbytes", + "ConstrainedList", + "conlist", + "ConstrainedSet", + "conset", + "ConstrainedFrozenSet", + "confrozenset", + "ConstrainedStr", + "constr", + "PyObject", + "ConstrainedInt", + "conint", + "PositiveInt", + "NegativeInt", + "NonNegativeInt", + "NonPositiveInt", + "ConstrainedFloat", + "confloat", + "PositiveFloat", + "NegativeFloat", + "NonNegativeFloat", + "NonPositiveFloat", + "FiniteFloat", + "ConstrainedDecimal", + "condecimal", + "UUID1", + "UUID3", + "UUID4", + "UUID5", + "FilePath", + "DirectoryPath", + "Json", + "JsonWrapper", + "SecretField", + "SecretStr", + "SecretBytes", + "StrictBool", + "StrictBytes", + "StrictInt", + "StrictFloat", + "PaymentCardNumber", + "ByteSize", + "PastDate", + "FutureDate", + "ConstrainedDate", + "condate", +] + +NoneStr = Optional[str] +NoneBytes = Optional[bytes] +StrBytes = Union[str, bytes] +NoneStrBytes = Optional[StrBytes] +OptionalInt = Optional[int] +OptionalIntFloat = Union[OptionalInt, float] +OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] +OptionalDate = Optional[date] +StrIntFloat = Union[str, int, float] + +if TYPE_CHECKING: + from typing_extensions import Annotated + + from .dataclasses import Dataclass + from .main import BaseModel + from .typing import CallableGenerator + + ModelOrDc = Type[Union[BaseModel, Dataclass]] + +T = TypeVar("T") +_DEFINED_TYPES: "WeakSet[type]" = WeakSet() + + +@overload +def _registered(typ: Type[T]) -> Type[T]: + pass + + +@overload +def _registered(typ: "ConstrainedNumberMeta") -> "ConstrainedNumberMeta": + pass + + +def _registered(typ: Union[Type[T], 
"ConstrainedNumberMeta"]) -> Union[Type[T], "ConstrainedNumberMeta"]: + # In order to generate valid examples of constrained types, Hypothesis needs + # to inspect the type object - so we keep a weakref to each contype object + # until it can be registered. When (or if) our Hypothesis plugin is loaded, + # it monkeypatches this function. + # If Hypothesis is never used, the total effect is to keep a weak reference + # which has minimal memory usage and doesn't even affect garbage collection. + _DEFINED_TYPES.add(typ) + return typ + + +class ConstrainedNumberMeta(type): + def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> "ConstrainedInt": # type: ignore + new_cls = cast("ConstrainedInt", type.__new__(cls, name, bases, dct)) + + if new_cls.gt is not None and new_cls.ge is not None: + raise errors.ConfigError("bounds gt and ge cannot be specified at the same time") + if new_cls.lt is not None and new_cls.le is not None: + raise errors.ConfigError("bounds lt and le cannot be specified at the same time") + + return _registered(new_cls) # type: ignore + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + StrictBool = bool +else: + + class StrictBool(int): + """ + StrictBool to allow for bools which are not type-coerced. + """ + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type="boolean") + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> bool: + """ + Ensure that we only allow bools. + """ + if isinstance(value, bool): + return value + + raise errors.StrictBoolError() + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalInt = None + ge: OptionalInt = None + lt: OptionalInt = None + le: OptionalInt = None + multiple_of: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield strict_int_validator if cls.strict else int_validator + yield number_size_validator + yield number_multiple_validator + + +def conint( + *, + strict: bool = False, + gt: Optional[int] = None, + ge: Optional[int] = None, + lt: Optional[int] = None, + le: Optional[int] = None, + multiple_of: Optional[int] = None, +) -> Type[int]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) + return type("ConstrainedIntValue", (ConstrainedInt,), namespace) + + +if TYPE_CHECKING: + PositiveInt = int + NegativeInt = int + NonPositiveInt = int + NonNegativeInt = int + StrictInt = int +else: + + class PositiveInt(ConstrainedInt): + gt = 0 + + class NegativeInt(ConstrainedInt): + lt = 0 + + class NonPositiveInt(ConstrainedInt): + le = 0 + + class NonNegativeInt(ConstrainedInt): + ge = 0 + + class StrictInt(ConstrainedInt): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalIntFloat = None + ge: OptionalIntFloat = None + lt: OptionalIntFloat = None + le: 
OptionalIntFloat = None + multiple_of: OptionalIntFloat = None + allow_inf_nan: Optional[bool] = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + # Modify constraints to account for differences between IEEE floats and JSON + if field_schema.get("exclusiveMinimum") == -math.inf: + del field_schema["exclusiveMinimum"] + if field_schema.get("minimum") == -math.inf: + del field_schema["minimum"] + if field_schema.get("exclusiveMaximum") == math.inf: + del field_schema["exclusiveMaximum"] + if field_schema.get("maximum") == math.inf: + del field_schema["maximum"] + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield strict_float_validator if cls.strict else float_validator + yield number_size_validator + yield number_multiple_validator + yield float_finite_validator + + +def confloat( + *, + strict: bool = False, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: Optional[bool] = None, +) -> Type[float]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) + return type("ConstrainedFloatValue", (ConstrainedFloat,), namespace) + + +if TYPE_CHECKING: + PositiveFloat = float + NegativeFloat = float + NonPositiveFloat = float + NonNegativeFloat = float + StrictFloat = float + FiniteFloat = float +else: + + class PositiveFloat(ConstrainedFloat): + gt = 0 + + class NegativeFloat(ConstrainedFloat): + lt = 0 + + class NonPositiveFloat(ConstrainedFloat): + le = 0 + + class NonNegativeFloat(ConstrainedFloat): + ge = 0 + + class StrictFloat(ConstrainedFloat): + strict = True + + class FiniteFloat(ConstrainedFloat): + allow_inf_nan = False + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedBytes(bytes): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + strict: bool = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield strict_bytes_validator if cls.strict else bytes_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + + +def conbytes( + *, + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + strict: bool = False, +) -> Type[bytes]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + min_length=min_length, + max_length=max_length, + strict=strict, + ) + return _registered(type("ConstrainedBytesValue", (ConstrainedBytes,), namespace)) + + +if TYPE_CHECKING: + StrictBytes = bytes +else: + + class StrictBytes(ConstrainedBytes): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedStr(str): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + 
max_length: OptionalInt = None
+    curtail_length: OptionalInt = None
+    regex: Optional[Union[str, Pattern[str]]] = None
+    strict = False
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+            pattern=cls.regex and cls._get_pattern(cls.regex),
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield strict_str_validator if cls.strict else str_validator
+        yield constr_strip_whitespace
+        yield constr_upper
+        yield constr_lower
+        yield constr_length_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Union[str]) -> Union[str]:
+        if cls.curtail_length and len(value) > cls.curtail_length:
+            value = value[: cls.curtail_length]
+
+        if cls.regex:
+            if not re.match(cls.regex, value):
+                raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex))
+
+        return value
+
+    @staticmethod
+    def _get_pattern(regex: Union[str, Pattern[str]]) -> str:
+        return regex if isinstance(regex, str) else regex.pattern
+
+
+def constr(
+    *,
+    strip_whitespace: bool = False,
+    to_upper: bool = False,
+    to_lower: bool = False,
+    strict: bool = False,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    curtail_length: Optional[int] = None,
+    regex: Optional[str] = None,
+) -> Type[str]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        strip_whitespace=strip_whitespace,
+        to_upper=to_upper,
+        to_lower=to_lower,
+        strict=strict,
+        min_length=min_length,
+        max_length=max_length,
+        curtail_length=curtail_length,
+        regex=regex and re.compile(regex),
+    )
+    return _registered(type("ConstrainedStrValue", (ConstrainedStr,), namespace))
+
+
+if TYPE_CHECKING:
+    StrictStr = str
+else:
+
+    class StrictStr(ConstrainedStr):
+        strict = True
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be Set[T], but cython chokes on that...
+class ConstrainedSet(set):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = set
+    __args__: Set[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield cls.set_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def set_length_validator(cls, v: "Optional[Set[T]]") -> "Optional[Set[T]]":
+        if v is None:
+            return None
+
+        v = set_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.SetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.SetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {"min_items": min_items, "max_items": max_items, "item_type": item_type, "__args__": [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class("ConstrainedSetValue", (ConstrainedSet,), {}, lambda ns: ns.update(namespace))
+
+
+# This type's superclass should be FrozenSet[T], but cython chokes on that...
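+# Illustrative note (added, not part of the upstream pydantic source):
+# ``confrozenset(int, max_items=2)`` builds a ``ConstrainedFrozenSetValue`` subclass
+# of the class below that rejects frozensets with more than two items.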
+class ConstrainedFrozenSet(frozenset):  # type: ignore
+    # Needed for pydantic to detect that this is a set
+    __origin__ = frozenset
+    __args__: FrozenSet[Type[T]]  # type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield cls.frozenset_length_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
+
+    @classmethod
+    def frozenset_length_validator(cls, v: "Optional[FrozenSet[T]]") -> "Optional[FrozenSet[T]]":
+        if v is None:
+            return None
+
+        v = frozenset_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.FrozenSetMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+
+def confrozenset(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None
+) -> Type[FrozenSet[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = {"min_items": min_items, "max_items": max_items, "item_type": item_type, "__args__": [item_type]}
+    # We use new_class to be able to deal with Generic types
+    return new_class("ConstrainedFrozenSetValue", (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+# This type's superclass should be List[T], but cython chokes on that...
+class ConstrainedList(list):  # type: ignore
+    # Needed for pydantic to detect that this is a list
+    __origin__ = list
+    __args__: Tuple[Type[T], ...]
# type: ignore
+
+    min_items: Optional[int] = None
+    max_items: Optional[int] = None
+    unique_items: Optional[bool] = None
+    item_type: Type[T]  # type: ignore
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield cls.list_length_validator
+        if cls.unique_items:
+            yield cls.unique_items_validator
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items)
+
+    @classmethod
+    def list_length_validator(cls, v: "Optional[List[T]]") -> "Optional[List[T]]":
+        if v is None:
+            return None
+
+        v = list_validator(v)
+        v_len = len(v)
+
+        if cls.min_items is not None and v_len < cls.min_items:
+            raise errors.ListMinLengthError(limit_value=cls.min_items)
+
+        if cls.max_items is not None and v_len > cls.max_items:
+            raise errors.ListMaxLengthError(limit_value=cls.max_items)
+
+        return v
+
+    @classmethod
+    def unique_items_validator(cls, v: "Optional[List[T]]") -> "Optional[List[T]]":
+        if v is None:
+            return None
+
+        for i, value in enumerate(v, start=1):
+            if value in v[i:]:
+                raise errors.ListUniqueItemsError()
+
+        return v
+
+
+def conlist(
+    item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None
+) -> Type[List[T]]:
+    # __args__ is needed to conform to typing generics api
+    namespace = dict(
+        min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,)
+    )
+    # We use new_class to be able to deal with Generic types
+    return new_class("ConstrainedListValue", (ConstrainedList,), {}, lambda ns: ns.update(namespace))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+if TYPE_CHECKING:
+    PyObject = Callable[..., Any]
+else:
+
+    class PyObject:
+        validate_always = True
+
+        @classmethod
+        def __get_validators__(cls) -> "CallableGenerator":
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Any) -> Any:
+            if isinstance(value, Callable):
+                return value
+
+            try:
+                value = str_validator(value)
+            except errors.StrError:
+                raise errors.PyObjectError(error_message="value is neither a valid import path nor a valid callable")
+
+            try:
+                return import_string(value)
+            except ImportError as e:
+                raise errors.PyObjectError(error_message=str(e))
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
+    gt: OptionalIntFloatDecimal = None
+    ge: OptionalIntFloatDecimal = None
+    lt: OptionalIntFloatDecimal = None
+    le: OptionalIntFloatDecimal = None
+    max_digits: OptionalInt = None
+    decimal_places: OptionalInt = None
+    multiple_of: OptionalIntFloatDecimal = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            exclusiveMinimum=cls.gt,
+            exclusiveMaximum=cls.lt,
+            minimum=cls.ge,
+            maximum=cls.le,
+            multipleOf=cls.multiple_of,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield decimal_validator
+        yield number_size_validator
+        yield number_multiple_validator
+        yield cls.validate
+
+    @classmethod
+    def validate(cls, value: Decimal) -> Decimal:
+        try:
+            normalized_value = value.normalize()
+        except InvalidOperation:
+            normalized_value = value
+        digit_tuple, exponent = normalized_value.as_tuple()[1:]
+        if exponent in {"F", "n", "N"}:
+            raise errors.DecimalIsNotFiniteError()
+
+        if exponent >= 0:
+            # A
positive exponent adds that many trailing zeros.
+            digits = len(digit_tuple) + exponent
+            decimals = 0
+        else:
+            # If the absolute value of the negative exponent is larger than the
+            # number of digits, then it's the same as the number of digits,
+            # because it'll consume all of the digits in digit_tuple and then
+            # add abs(exponent) - len(digit_tuple) leading zeros after the
+            # decimal point.
+            if abs(exponent) > len(digit_tuple):
+                digits = decimals = abs(exponent)
+            else:
+                digits = len(digit_tuple)
+                decimals = abs(exponent)
+        whole_digits = digits - decimals
+
+        if cls.max_digits is not None and digits > cls.max_digits:
+            raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits)
+
+        if cls.decimal_places is not None and decimals > cls.decimal_places:
+            raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places)
+
+        if cls.max_digits is not None and cls.decimal_places is not None:
+            expected = cls.max_digits - cls.decimal_places
+            if whole_digits > expected:
+                raise errors.DecimalWholeDigitsError(whole_digits=expected)
+
+        return value
+
+
+def condecimal(
+    *,
+    gt: Decimal = None,
+    ge: Decimal = None,
+    lt: Decimal = None,
+    le: Decimal = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    multiple_of: Decimal = None,
+) -> Type[Decimal]:
+    # use kwargs then define conf in a dict to aid with IDE type hinting
+    namespace = dict(
+        gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of
+    )
+    return type("ConstrainedDecimalValue", (ConstrainedDecimal,), namespace)
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    UUID1 = UUID
+    UUID3 = UUID
+    UUID4 = UUID
+    UUID5 = UUID
+else:
+
+    class UUID1(UUID):
+        _required_version = 1
+
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type="string", format=f"uuid{cls._required_version}")
+
+    class UUID3(UUID1):
+        _required_version = 3
+
+    class UUID4(UUID1):
+        _required_version = 4
+
+    class UUID5(UUID1):
+        _required_version = 5
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+if TYPE_CHECKING:
+    FilePath = Path
+    DirectoryPath = Path
+else:
+
+    class FilePath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format="file-path")
+
+        @classmethod
+        def __get_validators__(cls) -> "CallableGenerator":
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_file():
+                raise errors.PathNotAFileError(path=value)
+
+            return value
+
+    class DirectoryPath(Path):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(format="directory-path")
+
+        @classmethod
+        def __get_validators__(cls) -> "CallableGenerator":
+            yield path_validator
+            yield path_exists_validator
+            yield cls.validate
+
+        @classmethod
+        def validate(cls, value: Path) -> Path:
+            if not value.is_dir():
+                raise errors.PathNotADirectoryError(path=value)
+
+            return value
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class JsonWrapper:
+    pass
+
+
+class JsonMeta(type):
+    def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]:
+        if t is Any:
+            return Json  # allow Json[Any] to replicate plain Json
+        return _registered(type("JsonWrapperValue", (JsonWrapper,), {"inner_type": t}))
+
+
+if TYPE_CHECKING:
+    Json = Annotated[T, ...]  # Json[list[str]] will be recognized by type checkers as list[str]
+
+else:
+
+    class Json(metaclass=JsonMeta):
+        @classmethod
+        def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+            field_schema.update(type="string", format="json-string")
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class SecretField(abc.ABC):
+    """
+    Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`;
+    the `__init__()` should be part of the abstract class and the
+    `get_secret_value()` method should use the generic `T` type.
+
+    However, Cython doesn't support generics very well at the moment, and
+    the generated code fails to be imported (see
+    https://github.com/cython/cython/issues/2753).
+    """
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value()
+
+    def __str__(self) -> str:
+        return "**********" if self.get_secret_value() else ""
+
+    def __hash__(self) -> int:
+        return hash(self.get_secret_value())
+
+    @abc.abstractmethod
+    def get_secret_value(self) -> Any:  # pragma: no cover
+        ...
+
+
+class SecretStr(SecretField):
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type="string",
+            writeOnly=True,
+            format="password",
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield cls.validate
+        yield constr_length_validator
+
+    @classmethod
+    def validate(cls, value: Any) -> "SecretStr":
+        if isinstance(value, cls):
+            return value
+        value = str_validator(value)
+        return cls(value)
+
+    def __init__(self, value: str):
+        self._secret_value = value
+
+    def __repr__(self) -> str:
+        return f"SecretStr('{self}')"
+
+    def __len__(self) -> int:
+        return len(self._secret_value)
+
+    def display(self) -> str:
+        warnings.warn("`secret_str.display()` is deprecated, use `str(secret_str)` instead", DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> str:
+        return self._secret_value
+
+
+class SecretBytes(SecretField):
+    min_length: OptionalInt = None
+    max_length: OptionalInt = None
+
+    @classmethod
+    def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
+        update_not_none(
+            field_schema,
+            type="string",
+            writeOnly=True,
+            format="password",
+            minLength=cls.min_length,
+            maxLength=cls.max_length,
+        )
+
+    @classmethod
+    def __get_validators__(cls) -> "CallableGenerator":
+        yield cls.validate
+        yield constr_length_validator
+
+    @classmethod
+    def validate(cls, value: Any) -> "SecretBytes":
+        if isinstance(value, cls):
+            return value
+        value = bytes_validator(value)
+        return cls(value)
+
+    def __init__(self, value: bytes):
+        self._secret_value = value
+
+    def __repr__(self) -> str:
+        return f"SecretBytes(b'{self}')"
+
+    def __len__(self) -> int:
+        return len(self._secret_value)
+
+    def display(self) -> str:
+        warnings.warn("`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead", DeprecationWarning)
+        return str(self)
+
+    def get_secret_value(self) -> bytes:
+        return self._secret_value
+
+
+# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+class PaymentCardBrand(str, Enum):
+    # If you add another card type, please also add it to the
+    # Hypothesis strategy in `pydantic._hypothesis_plugin`.
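+    # Illustrative note (added, not part of the upstream pydantic source): because
+    # this is a ``str`` enum with ``__str__`` overridden below,
+    # ``str(PaymentCardBrand.visa)`` gives "Visa" rather than "PaymentCardBrand.visa".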
+ amex = "American Express" + mastercard = "Mastercard" + visa = "Visa" + other = "other" + + def __str__(self) -> str: + return self.value + + +class PaymentCardNumber(str): + """ + Based on: https://en.wikipedia.org/wiki/Payment_card_number + """ + + strip_whitespace: ClassVar[bool] = True + min_length: ClassVar[int] = 12 + max_length: ClassVar[int] = 19 + bin: str + last4: str + brand: PaymentCardBrand + + def __init__(self, card_number: str): + self.bin = card_number[:6] + self.last4 = card_number[-4:] + self.brand = self._get_brand(card_number) + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield str_validator + yield constr_strip_whitespace + yield constr_length_validator + yield cls.validate_digits + yield cls.validate_luhn_check_digit + yield cls + yield cls.validate_length_for_brand + + @property + def masked(self) -> str: + num_masked = len(self) - 10 # len(bin) + len(last4) == 10 + return f'{self.bin}{"*" * num_masked}{self.last4}' + + @classmethod + def validate_digits(cls, card_number: str) -> str: + if not card_number.isdigit(): + raise errors.NotDigitError + return card_number + + @classmethod + def validate_luhn_check_digit(cls, card_number: str) -> str: + """ + Based on: https://en.wikipedia.org/wiki/Luhn_algorithm + """ + sum_ = int(card_number[-1]) + length = len(card_number) + parity = length % 2 + for i in range(length - 1): + digit = int(card_number[i]) + if i % 2 == parity: + digit *= 2 + if digit > 9: + digit -= 9 + sum_ += digit + valid = sum_ % 10 == 0 + if not valid: + raise errors.LuhnValidationError + return card_number + + @classmethod + def validate_length_for_brand(cls, card_number: "PaymentCardNumber") -> "PaymentCardNumber": + """ + Validate length based on BIN for major brands: + https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) + """ + required_length: Union[None, int, str] = None + if card_number.brand in PaymentCardBrand.mastercard: + required_length = 16 + valid = len(card_number) == required_length + elif card_number.brand == PaymentCardBrand.visa: + required_length = "13, 16 or 19" + valid = len(card_number) in {13, 16, 19} + elif card_number.brand == PaymentCardBrand.amex: + required_length = 15 + valid = len(card_number) == required_length + else: + valid = True + if not valid: + raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) + return card_number + + @staticmethod + def _get_brand(card_number: str) -> PaymentCardBrand: + if card_number[0] == "4": + brand = PaymentCardBrand.visa + elif 51 <= int(card_number[:2]) <= 55: + brand = PaymentCardBrand.mastercard + elif card_number[:2] in {"34", "37"}: + brand = PaymentCardBrand.amex + else: + brand = PaymentCardBrand.other + return brand + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +BYTE_SIZES = { + "b": 1, + "kb": 10**3, + "mb": 10**6, + "gb": 10**9, + "tb": 10**12, + "pb": 10**15, + "eb": 10**18, + "kib": 2**10, + "mib": 2**20, + "gib": 2**30, + "tib": 2**40, + "pib": 2**50, + "eib": 2**60, +} +BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if "i" not in k}) +byte_string_re = re.compile(r"^\s*(\d*\.?\d+)\s*(\w+)?", re.IGNORECASE) + + +class ByteSize(int): + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield cls.validate + + @classmethod + def validate(cls, v: StrIntFloat) -> "ByteSize": + try: + return cls(int(v)) + except ValueError: + pass + + str_match = byte_string_re.match(str(v)) + if str_match is None: + 
raise errors.InvalidByteSize() + + scalar, unit = str_match.groups() + if unit is None: + unit = "b" + + try: + unit_mult = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return cls(int(float(scalar) * unit_mult)) + + def human_readable(self, decimal: bool = False) -> str: + if decimal: + divisor = 1000 + units = ["B", "KB", "MB", "GB", "TB", "PB"] + final_unit = "EB" + else: + divisor = 1024 + units = ["B", "KiB", "MiB", "GiB", "TiB", "PiB"] + final_unit = "EiB" + + num = float(self) + for unit in units: + if abs(num) < divisor: + return f"{num:0.1f}{unit}" + num /= divisor + + return f"{num:0.1f}{final_unit}" + + def to(self, unit: str) -> float: + try: + unit_div = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return self / unit_div + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + PastDate = date + FutureDate = date +else: + + class PastDate(date): + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value >= date.today(): + raise errors.DateNotInThePastError() + + return value + + class FutureDate(date): + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value <= date.today(): + raise errors.DateNotInTheFutureError() + + return value + + +class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): + gt: OptionalDate = None + ge: OptionalDate = None + lt: OptionalDate = None + le: OptionalDate = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) + + @classmethod + def __get_validators__(cls) -> "CallableGenerator": + yield parse_date + yield number_size_validator + + +def condate( + *, + gt: date = None, + ge: date = None, + lt: date = None, + le: date = None, +) -> Type[date]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(gt=gt, ge=ge, lt=lt, le=le) + return type("ConstrainedDateValue", (ConstrainedDate,), namespace) diff --git a/vendor/pydantic/typing.py b/vendor/pydantic/typing.py new file mode 100644 index 0000000..d3c0b6a --- /dev/null +++ b/vendor/pydantic/typing.py @@ -0,0 +1,597 @@ +import sys +import typing +from collections.abc import Callable +from os import PathLike +from typing import TYPE_CHECKING, AbstractSet, Any +from typing import Callable as TypingCallable # type: ignore +from typing import ( + ClassVar, + Dict, + ForwardRef, + Generator, + Iterable, + List, + Mapping, + NewType, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + _eval_type, + cast, + get_type_hints, +) + +from typing_extensions import Annotated, Final, Literal +from typing_extensions import NotRequired as TypedDictNotRequired +from typing_extensions import Required as TypedDictRequired + +try: + from typing import _TypingBase as typing_base # type: ignore +except ImportError: + from typing import _Final as typing_base # type: ignore + +try: + from typing import GenericAlias as TypingGenericAlias # type: ignore +except ImportError: + # python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] 
and so on) + TypingGenericAlias = () + +try: + from types import UnionType as TypesUnionType # type: ignore +except ImportError: + # python < 3.10 does not have UnionType (str | int, byte | bool and so on) + TypesUnionType = () + + +if sys.version_info < (3, 9): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + return type_._evaluate(globalns, localns) + +else: + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Even though it is the right signature for python 3.9, mypy complains with + # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... + return cast(Any, type_)._evaluate(globalns, localns, set()) + + +if sys.version_info < (3, 9): + # Ensure we always get all the whole `Annotated` hint, not just the annotated type. + # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, + # so it already returns the full annotation + get_all_type_hints = get_type_hints + +else: + + def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: + return get_type_hints(obj, globalns, localns, include_extras=True) + + +_T = TypeVar("_T") + +AnyCallable = TypingCallable[..., Any] +NoArgAnyCallable = TypingCallable[[], Any] + +# workaround for https://github.com/python/mypy/issues/9496 +AnyArgTCallable = TypingCallable[..., _T] + + +# Annotated[...] is implemented by returning an instance of one of these classes, depending on +# python/typing_extensions version. +AnnotatedTypeNames = {"AnnotatedMeta", "_AnnotatedAlias"} + + +LITERAL_TYPES: Set[Any] = {Literal} +if hasattr(typing, "Literal"): + LITERAL_TYPES.add(typing.Literal) + + +if sys.version_info < (3, 8): + + def get_origin(t: Type[Any]) -> Optional[Type[Any]]: + if type(t).__name__ in AnnotatedTypeNames: + # weirdly this is a runtime requirement, as well as for mypy + return cast(Type[Any], Annotated) + return getattr(t, "__origin__", None) + +else: + from typing import get_origin as _typing_get_origin + + def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: + """ + We can't directly use `typing.get_origin` since we need a fallback to support + custom generic classes like `ConstrainedList` + It should be useless once https://github.com/cython/cython/issues/3537 is + solved and https://github.com/pydantic/pydantic/pull/1753 is merged. + """ + if type(tp).__name__ in AnnotatedTypeNames: + return cast(Type[Any], Annotated) # mypy complains about _SpecialForm + return _typing_get_origin(tp) or getattr(tp, "__origin__", None) + + +if sys.version_info < (3, 8): + from typing import _GenericAlias + + def get_args(t: Type[Any]) -> Tuple[Any, ...]: + """Compatibility version of get_args for python 3.7. + + Mostly compatible with the python 3.8 `typing` module version + and able to handle almost all use cases. + """ + if type(t).__name__ in AnnotatedTypeNames: + return t.__args__ + t.__metadata__ + if isinstance(t, _GenericAlias): + res = t.__args__ + if t.__origin__ is Callable and res and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return getattr(t, "__args__", ()) + +else: + from typing import get_args as _typing_get_args + + def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """ + In python 3.9, `typing.Dict`, `typing.List`, ... + do have an empty `__args__` by default (instead of the generic ~T for example). + In order to still support `Dict` for example and consider it as `Dict[Any, Any]`, + we retrieve the `_nparams` value that tells us how many parameters it needs. 
+ """ + if hasattr(tp, "_nparams"): + return (Any,) * tp._nparams + # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple` + # in python 3.10- but now returns () for `tuple` and `Tuple`. + # This will probably be clarified in pydantic v2 + try: + if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]: # type: ignore[misc] + return ((),) + # there is a TypeError when compiled with cython + except TypeError: # pragma: no cover + pass + return () + + def get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if type(tp).__name__ in AnnotatedTypeNames: + return tp.__args__ + tp.__metadata__ + # the fallback is needed for the same reasons as `get_origin` (see above) + return _typing_get_args(tp) or getattr(tp, "__args__", ()) or _generic_get_args(tp) + + +if sys.version_info < (3, 9): + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """Python 3.9 and older only supports generics from `typing` module. + They convert strings to ForwardRef automatically. + + Examples:: + typing.List['Hero'] == typing.List[ForwardRef('Hero')] + """ + return tp + +else: + from typing import _UnionGenericAlias # type: ignore + + from typing_extensions import _AnnotatedAlias + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """ + Recursively searches for `str` type hints and replaces them with ForwardRef. + + Examples:: + convert_generics(list['Hero']) == list[ForwardRef('Hero')] + convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int + """ + origin = get_origin(tp) + if not origin or not hasattr(tp, "__args__"): + return tp + + args = get_args(tp) + + # typing.Annotated needs special treatment + if origin is Annotated: + return _AnnotatedAlias(convert_generics(args[0]), args[1:]) + + # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` + converted = tuple( + ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) + for arg in args + ) + + if converted == args: + return tp + elif isinstance(tp, TypingGenericAlias): + return TypingGenericAlias(origin, converted) + elif isinstance(tp, TypesUnionType): + # recreate types.UnionType (PEP604, Python >= 3.10) + return _UnionGenericAlias(origin, converted) + else: + try: + setattr(tp, "__args__", converted) + except AttributeError: + pass + return tp + + +if sys.version_info < (3, 10): + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union + + WithArgsTypes = (TypingGenericAlias,) + +else: + import types + import typing + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union or tp is types.UnionType # noqa: E721 + + WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) + + +StrPath = Union[str, PathLike] + + +if TYPE_CHECKING: + from .fields import ModelField + + TupleGenerator = Generator[Tuple[str, Any], None, None] + DictStrAny = Dict[str, Any] + DictAny = Dict[Any, Any] + SetStr = Set[str] 
+    ListStr = List[str]
+    IntStr = Union[int, str]
+    AbstractSetIntStr = AbstractSet[IntStr]
+    DictIntStrAny = Dict[IntStr, Any]
+    MappingIntStrAny = Mapping[IntStr, Any]
+    CallableGenerator = Generator[AnyCallable, None, None]
+    ReprArgs = Sequence[Tuple[Optional[str], Any]]
+
+    MYPY = False
+    if MYPY:
+        AnyClassMethod = classmethod[Any]
+    else:
+        # classmethod[TargetType, CallableParamSpecType, CallableReturnType]
+        AnyClassMethod = classmethod[Any, Any, Any]
+
+__all__ = (
+    "AnyCallable",
+    "NoArgAnyCallable",
+    "NoneType",
+    "is_none_type",
+    "display_as_type",
+    "resolve_annotations",
+    "is_callable_type",
+    "is_literal_type",
+    "all_literal_values",
+    "is_namedtuple",
+    "is_typeddict",
+    "is_typeddict_special",
+    "is_new_type",
+    "new_type_supertype",
+    "is_classvar",
+    "is_finalvar",
+    "update_field_forward_refs",
+    "update_model_forward_refs",
+    "TupleGenerator",
+    "DictStrAny",
+    "DictAny",
+    "SetStr",
+    "ListStr",
+    "IntStr",
+    "AbstractSetIntStr",
+    "DictIntStrAny",
+    "CallableGenerator",
+    "ReprArgs",
+    "AnyClassMethod",
+    "CallableGenerator",
+    "WithArgsTypes",
+    "get_args",
+    "get_origin",
+    "get_sub_types",
+    "typing_base",
+    "get_all_type_hints",
+    "is_union",
+    "StrPath",
+    "MappingIntStrAny",
+)
+
+
+NoneType = None.__class__
+
+
+NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None])
+
+
+if sys.version_info < (3, 8):
+    # Even though this implementation is slower, we need it for python 3.7:
+    # In python 3.7 "Literal" is not a builtin type and uses a different
+    # mechanism.
+    # for this reason `Literal[None] is Literal[None]` evaluates to `False`,
+    # breaking the faster implementation used for the other python versions.
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+elif sys.version_info[:2] == (3, 8):
+
+    def is_none_type(type_: Any) -> bool:
+        for none_type in NONE_TYPES:
+            if type_ is none_type:
+                return True
+        # With python 3.8, specifically 3.8.10, Literal "is" checks are very flakey
+        # can change on very subtle changes like use of types in other modules,
+        # hopefully this check avoids that issue.
+        if is_literal_type(type_):  # pragma: no cover
+            return all_literal_values(type_) == (None,)
+        return False
+
+else:
+
+    def is_none_type(type_: Any) -> bool:
+        return type_ in NONE_TYPES
+
+
+def display_as_type(v: Type[Any]) -> str:
+    if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
+        v = v.__class__
+
+    if is_union(get_origin(v)):
+        return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
+
+    if isinstance(v, WithArgsTypes):
+        # Generic alias are constructs like `list[int]`
+        return str(v).replace("typing.", "")
+
+    try:
+        return v.__name__
+    except AttributeError:
+        # happens with typing objects
+        return str(v).replace("typing.", "")
+
+
+def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
+    """
+    Partially taken from typing.get_type_hints.
+
+    Resolve string or ForwardRef annotations into type objects if possible.
+ """ + base_globals: Optional[Dict[str, Any]] = None + if module_name: + try: + module = sys.modules[module_name] + except KeyError: + # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 + pass + else: + base_globals = module.__dict__ + + annotations = {} + for name, value in raw_annotations.items(): + if isinstance(value, str): + if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1): + value = ForwardRef(value, is_argument=False, is_class=True) + else: + value = ForwardRef(value, is_argument=False) + try: + value = _eval_type(value, base_globals, None) + except NameError: + # this is ok, it can be fixed with update_forward_refs + pass + annotations[name] = value + return annotations + + +def is_callable_type(type_: Type[Any]) -> bool: + return type_ is Callable or get_origin(type_) is Callable + + +def is_literal_type(type_: Type[Any]) -> bool: + return Literal is not None and get_origin(type_) in LITERAL_TYPES + + +def literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + return get_args(type_) + + +def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + """ + This method is used to retrieve all Literal values as + Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) + e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` + """ + if not is_literal_type(type_): + return (type_,) + + values = literal_values(type_) + return tuple(x for value in values for x in all_literal_values(value)) + + +def is_namedtuple(type_: Type[Any]) -> bool: + """ + Check if a given class is a named tuple. + It can be either a `typing.NamedTuple` or `collections.namedtuple` + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, tuple) and hasattr(type_, "_fields") + + +def is_typeddict(type_: Type[Any]) -> bool: + """ + Check if a given class is a typed dict (from `typing` or `typing_extensions`) + In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) + """ + from .utils import lenient_issubclass + + return lenient_issubclass(type_, dict) and hasattr(type_, "__total__") + + +def _check_typeddict_special(type_: Any) -> bool: + return type_ is TypedDictRequired or type_ is TypedDictNotRequired + + +def is_typeddict_special(type_: Any) -> bool: + """ + Check if type is a TypedDict special form (Required or NotRequired). + """ + return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) + + +test_type = NewType("test_type", str) + + +def is_new_type(type_: Type[Any]) -> bool: + """ + Check whether type_ was created using typing.NewType + """ + return isinstance(type_, test_type.__class__) and hasattr(type_, "__supertype__") # type: ignore + + +def new_type_supertype(type_: Type[Any]) -> Type[Any]: + while hasattr(type_, "__supertype__"): + type_ = type_.__supertype__ + return type_ + + +def _check_classvar(v: Optional[Type[Any]]) -> bool: + if v is None: + return False + + return v.__class__ == ClassVar.__class__ and getattr(v, "_name", None) == "ClassVar" + + +def _check_finalvar(v: Optional[Type[Any]]) -> bool: + """ + Check if a given type is a `typing.Final` type. 
+ """ + if v is None: + return False + + return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, "_name", None) == "Final") + + +def is_classvar(ann_type: Type[Any]) -> bool: + if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): + return True + + # this is an ugly workaround for class vars that contain forward references and are therefore themselves + # forward references, see #3679 + if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith("ClassVar["): + return True + + return False + + +def is_finalvar(ann_type: Type[Any]) -> bool: + return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) + + +def update_field_forward_refs(field: "ModelField", globalns: Any, localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this ModelField, globalns and localns. + """ + prepare = False + if field.type_.__class__ == ForwardRef: + prepare = True + field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) + if field.outer_type_.__class__ == ForwardRef: + prepare = True + field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) + if prepare: + field.prepare() + + if field.sub_fields: + for sub_f in field.sub_fields: + update_field_forward_refs(sub_f, globalns=globalns, localns=localns) + + if field.discriminator_key is not None: + field.prepare_discriminated_union_sub_fields() + + +def update_model_forward_refs( + model: Type[Any], + fields: Iterable["ModelField"], + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], + localns: "DictStrAny", + exc_to_suppress: Tuple[Type[BaseException], ...] = (), +) -> None: + """ + Try to update model fields ForwardRefs based on model and localns. + """ + if model.__module__ in sys.modules: + globalns = sys.modules[model.__module__].__dict__.copy() + else: + globalns = {} + + globalns.setdefault(model.__name__, model) + + for f in fields: + try: + update_field_forward_refs(f, globalns=globalns, localns=localns) + except exc_to_suppress: + pass + + for key in set(json_encoders.keys()): + if isinstance(key, str): + fr: ForwardRef = ForwardRef(key) + elif isinstance(key, ForwardRef): + fr = key + else: + continue + + try: + new_key = evaluate_forwardref(fr, globalns, localns or None) + except exc_to_suppress: # pragma: no cover + continue + + json_encoders[new_key] = json_encoders.pop(key) + + +def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: + """ + Tries to get the class of a Type[T] annotation. Returns True if Type is used + without brackets. Otherwise returns None. 
+ """ + if type_ is type: + return True + + if get_origin(type_) is None: + return None + + args = get_args(type_) + if not args or not isinstance(args[0], type): + return True + else: + return args[0] + + +def get_sub_types(tp: Any) -> List[Any]: + """ + Return all the types that are allowed by type `tp` + `tp` can be a `Union` of allowed types or an `Annotated` type + """ + origin = get_origin(tp) + if origin is Annotated: + return get_sub_types(get_args(tp)[0]) + elif is_union(origin): + return [x for t in get_args(tp) for x in get_sub_types(t)] + else: + return [tp] diff --git a/vendor/pydantic/utils.py b/vendor/pydantic/utils.py new file mode 100644 index 0000000..26a07cf --- /dev/null +++ b/vendor/pydantic/utils.py @@ -0,0 +1,803 @@ +import keyword +import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from copy import deepcopy +from itertools import islice, zip_longest +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + Collection, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated + +from .errors import ConfigError +from .typing import ( + NoneType, + WithArgsTypes, + all_literal_values, + display_as_type, + get_args, + get_origin, + is_literal_type, + is_union, +) +from .version import version_info + +if TYPE_CHECKING: + from inspect import Signature + from pathlib import Path + + from .config import BaseConfig + from .dataclasses import Dataclass + from .fields import ModelField + from .main import BaseModel + from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs + + RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + +__all__ = ( + "import_string", + "sequence_like", + "validate_field_name", + "lenient_isinstance", + "lenient_issubclass", + "in_ipython", + "is_valid_identifier", + "deep_update", + "update_not_none", + "almost_equal_floats", + "get_model", + "to_camel", + "is_valid_field", + "smart_deepcopy", + "PyObjectStr", + "Representation", + "GetterDict", + "ValueItems", + "version_info", # required here to match behaviour in v1.3 + "ClassAttribute", + "path_type", + "ROOT_KEY", + "get_unique_discriminator_alias", + "get_discriminator_alias_and_values", + "DUNDER_ATTRIBUTES", +) + +ROOT_KEY = "__root__" +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. + # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: Set[Type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + + +def import_string(dotted_path: str) -> Any: + """ + Stolen approximately from django. 
Import a dotted module path and return the attribute/class designated by the + last name in the path. Raise ImportError if the import fails. + """ + from importlib import import_module + + try: + module_path, class_name = dotted_path.strip(" ").rsplit(".", 1) + except ValueError as e: + raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e + + module = import_module(module_path) + try: + return getattr(module, class_name) + except AttributeError as e: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e + + +def truncate(v: Union[str], *, max_len: int = 80) -> str: + """ + Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long + """ + warnings.warn("`truncate` is no-longer used by pydantic and is deprecated", DeprecationWarning) + if isinstance(v, str) and len(v) > (max_len - 2): + # -3 so quote + string + … + quote has correct length + return (v[: (max_len - 3)] + "…").__repr__() + try: + v = v.__repr__() + except TypeError: + v = v.__class__.__repr__(v) # in case v is a type + if len(v) > max_len: + v = v[: max_len - 1] + "…" + return v + + +def sequence_like(v: Any) -> bool: + return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) + + +def validate_field_name(bases: List[Type["BaseModel"]], field_name: str) -> None: + """ + Ensure that the field's name does not shadow an existing attribute of the model. + """ + for base in bases: + if getattr(base, field_name, None): + raise NameError( + f'Field name "{field_name}" shadows a BaseModel attribute; ' + f"use a different field name with \"alias='{field_name}'\"." + ) + + +def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(o, class_or_tuple) # type: ignore[arg-type] + except TypeError: + return False + + +def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] + except TypeError: + if isinstance(cls, WithArgsTypes): + return False + raise # pragma: no cover + + +def in_ipython() -> bool: + """ + Check whether we're in an ipython environment, including jupyter notebooks. + """ + try: + eval("__IPYTHON__") + except NameError: + return False + else: # pragma: no cover + return True + + +def is_valid_identifier(identifier: str) -> bool: + """ + Checks that a string is a valid identifier and not a Python keyword. + :param identifier: The identifier to test. + :return: True if the identifier is valid. 
+ """ + return identifier.isidentifier() and not keyword.iskeyword(identifier) + + +KeyType = TypeVar("KeyType") + + +def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: + updated_mapping = mapping.copy() + for updating_mapping in updating_mappings: + for k, v in updating_mapping.items(): + if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): + updated_mapping[k] = deep_update(updated_mapping[k], v) + else: + updated_mapping[k] = v + return updated_mapping + + +def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: + mapping.update({k: v for k, v in update.items() if v is not None}) + + +def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: + """ + Return True if two floats are almost equal + """ + return abs(value_1 - value_2) <= delta + + +def generate_model_signature( + init: Callable[..., None], fields: Dict[str, "ModelField"], config: Type["BaseConfig"] +) -> "Signature": + """ + Generate signature for model based on its fields + """ + from inspect import Parameter, Signature, signature + + from .config import Extra + + present_params = signature(init).parameters.values() + merged_params: Dict[str, Parameter] = {} + var_kw = None + use_var_kw = False + + for param in islice(present_params, 1, None): # skip self arg + if param.kind is param.VAR_KEYWORD: + var_kw = param + continue + merged_params[param.name] = param + + if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through + allow_names = config.allow_population_by_field_name + for field_name, field in fields.items(): + param_name = field.alias + if field_name in merged_params or param_name in merged_params: + continue + elif not is_valid_identifier(param_name): + if allow_names and is_valid_identifier(field_name): + param_name = field_name + else: + use_var_kw = True + continue + + # TODO: replace annotation with actual expected types once #1055 solved + kwargs = {"default": field.default} if not field.required else {} + merged_params[param_name] = Parameter( + param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs + ) + + if config.extra is Extra.allow: + use_var_kw = True + + if var_kw and use_var_kw: + # Make sure the parameter for extra kwargs + # does not have the same name as a field + default_model_signature = [ + ("__pydantic_self__", Parameter.POSITIONAL_OR_KEYWORD), + ("data", Parameter.VAR_KEYWORD), + ] + if [(p.name, p.kind) for p in present_params] == default_model_signature: + # if this is the standard model signature, use extra_data as the extra args name + var_kw_name = "extra_data" + else: + # else start from var_kw + var_kw_name = var_kw.name + + # generate a name that's definitely unique + while var_kw_name in fields: + var_kw_name += "_" + merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) + + return Signature(parameters=list(merged_params.values()), return_annotation=None) + + +def get_model(obj: Union[Type["BaseModel"], Type["Dataclass"]]) -> Type["BaseModel"]: + from .main import BaseModel + + try: + model_cls = obj.__pydantic_model__ # type: ignore + except AttributeError: + model_cls = obj + + if not issubclass(model_cls, BaseModel): + raise TypeError("Unsupported type, must be either BaseModel or dataclass") + return model_cls + + +def to_camel(string: str) -> str: + return "".join(word.capitalize() for word in string.split("_")) + + +def to_lower_camel(string: str) -> str: + if len(string) 
>= 1: + pascal_string = to_camel(string) + return pascal_string[0].lower() + pascal_string[1:] + return string.lower() + + +T = TypeVar("T") + + +def unique_list( + input_list: Union[List[T], Tuple[T, ...]], + *, + name_factory: Callable[[T], str] = str, +) -> List[T]: + """ + Make a list unique while maintaining order. + We update the list if another one with the same name is set + (e.g. root validator overridden in subclass) + """ + result: List[T] = [] + result_names: List[str] = [] + for v in input_list: + v_name = name_factory(v) + if v_name not in result_names: + result_names.append(v_name) + result.append(v) + else: + result[result_names.index(v_name)] = v + + return result + + +class PyObjectStr(str): + """ + String class where repr doesn't include quotes. Useful with Representation when you want to return a string + representation of something that valid (or pseudo-valid) python. + """ + + def __repr__(self) -> str: + return str(self) + + +class Representation: + """ + Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. + + __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations + of objects. + """ + + __slots__: Tuple[str, ...] = tuple() + + def __repr_args__(self) -> "ReprArgs": + """ + Returns the attributes to show in __str__, __repr__, and __pretty__ this is generally overridden. + + Can either return: + * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` + * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` + """ + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v is not None] + + def __repr_name__(self) -> str: + """ + Name of the instance's class, used in __repr__. + """ + return self.__class__.__name__ + + def __repr_str__(self, join_str: str) -> str: + return join_str.join(repr(v) if a is None else f"{a}={v!r}" for a, v in self.__repr_args__()) + + def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: + """ + Used by devtools (https://python-devtools.helpmanual.io/) to provide a human readable representations of objects + """ + yield self.__repr_name__() + "(" + yield 1 + for name, value in self.__repr_args__(): + if name is not None: + yield name + "=" + yield fmt(value) + yield "," + yield 0 + yield -1 + yield ")" + + def __str__(self) -> str: + return self.__repr_str__(" ") + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __rich_repr__(self) -> "RichReprResult": + """Get fields for Rich library""" + for name, field_repr in self.__repr_args__(): + if name is None: + yield field_repr + else: + yield name, field_repr + + +class GetterDict(Representation): + """ + Hack to make object's smell just enough like dicts for validate_model. + + We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves. 
+ """ + + __slots__ = ("_obj",) + + def __init__(self, obj: Any): + self._obj = obj + + def __getitem__(self, key: str) -> Any: + try: + return getattr(self._obj, key) + except AttributeError as e: + raise KeyError(key) from e + + def get(self, key: Any, default: Any = None) -> Any: + return getattr(self._obj, key, default) + + def extra_keys(self) -> Set[Any]: + """ + We don't want to get any other attributes of obj if the model didn't explicitly ask for them + """ + return set() + + def keys(self) -> List[Any]: + """ + Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python + dictionaries. + """ + return list(self) + + def values(self) -> List[Any]: + return [self[k] for k in self] + + def items(self) -> Iterator[Tuple[str, Any]]: + for k in self: + yield k, self.get(k) + + def __iter__(self) -> Iterator[str]: + for name in dir(self._obj): + if not name.startswith("_"): + yield name + + def __len__(self) -> int: + return sum(1 for _ in self) + + def __contains__(self, item: Any) -> bool: + return item in self.keys() + + def __eq__(self, other: Any) -> bool: + return dict(self) == dict(other.items()) + + def __repr_args__(self) -> "ReprArgs": + return [(None, dict(self))] + + def __repr_name__(self) -> str: + return f"GetterDict[{display_as_type(self._obj)}]" + + +class ValueItems(Representation): + """ + Class for more convenient calculation of excluded or included fields on values. + """ + + __slots__ = ("_items", "_type") + + def __init__(self, value: Any, items: Union["AbstractSetIntStr", "MappingIntStrAny"]) -> None: + items = self._coerce_items(items) + + if isinstance(value, (list, tuple)): + items = self._normalize_indexes(items, len(value)) + + self._items: "MappingIntStrAny" = items + + def is_excluded(self, item: Any) -> bool: + """ + Check if item is fully excluded. 
+ + :param item: key or index of a value + """ + return self.is_true(self._items.get(item)) + + def is_included(self, item: Any) -> bool: + """ + Check if value is contained in self._items + + :param item: key or index of value + """ + return item in self._items + + def for_element(self, e: "IntStr") -> Optional[Union["AbstractSetIntStr", "MappingIntStrAny"]]: + """ + :param e: key or index of element on value + :return: raw values for element if self._items is dict and contain needed element + """ + + item = self._items.get(e) + return item if not self.is_true(item) else None + + def _normalize_indexes(self, items: "MappingIntStrAny", v_length: int) -> "DictIntStrAny": + """ + :param items: dict or set of indexes which will be normalized + :param v_length: length of sequence indexes of which will be + + >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) + {0: True, 2: True, 3: True} + >>> self._normalize_indexes({'__all__': True}, 4) + {0: True, 1: True, 2: True, 3: True} + """ + + normalized_items: "DictIntStrAny" = {} + all_items = None + for i, v in items.items(): + if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): + raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') + if i == "__all__": + all_items = self._coerce_value(v) + continue + if not isinstance(i, int): + raise TypeError( + "Excluding fields from a sequence of sub-models or dicts must be performed index-wise: " + 'expected integer keys or keyword "__all__"' + ) + normalized_i = v_length + i if i < 0 else i + normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) + + if not all_items: + return normalized_items + if self.is_true(all_items): + for i in range(v_length): + normalized_items.setdefault(i, ...) + return normalized_items + for i in range(v_length): + normalized_item = normalized_items.setdefault(i, {}) + if not self.is_true(normalized_item): + normalized_items[i] = self.merge(all_items, normalized_item) + return normalized_items + + @classmethod + def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: + """ + Merge a ``base`` item with an ``override`` item. + + Both ``base`` and ``override`` are converted to dictionaries if possible. + Sets are converted to dictionaries with the sets entries as keys and + Ellipsis as values. + + Each key-value pair existing in ``base`` is merged with ``override``, + while the rest of the key-value pairs are updated recursively with this function. + + Merging takes place based on the "union" of keys if ``intersect`` is + set to ``False`` (default) and on the intersection of keys if + ``intersect`` is set to ``True``. 
+ """ + override = cls._coerce_value(override) + base = cls._coerce_value(base) + if override is None: + return base + if cls.is_true(base) or base is None: + return override + if cls.is_true(override): + return base if intersect else override + + # intersection or union of keys while preserving ordering: + if intersect: + merge_keys = [k for k in base if k in override] + [k for k in override if k in base] + else: + merge_keys = list(base) + [k for k in override if k not in base] + + merged: "DictIntStrAny" = {} + for k in merge_keys: + merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) + if merged_item is not None: + merged[k] = merged_item + + return merged + + @staticmethod + def _coerce_items(items: Union["AbstractSetIntStr", "MappingIntStrAny"]) -> "MappingIntStrAny": + if isinstance(items, Mapping): + pass + elif isinstance(items, AbstractSet): + items = dict.fromkeys(items, ...) + else: + class_name = getattr(items, "__class__", "???") + assert_never( + items, + f"Unexpected type of exclude value {class_name}", + ) + return items + + @classmethod + def _coerce_value(cls, value: Any) -> Any: + if value is None or cls.is_true(value): + return value + return cls._coerce_items(value) + + @staticmethod + def is_true(v: Any) -> bool: + return v is True or v is ... + + def __repr_args__(self) -> "ReprArgs": + return [(None, self._items)] + + +class ClassAttribute: + """ + Hide class attribute from its instances + """ + + __slots__ = ( + "name", + "value", + ) + + def __init__(self, name: str, value: Any) -> None: + self.name = name + self.value = value + + def __get__(self, instance: Any, owner: Type[Any]) -> None: + if instance is None: + return self.value + raise AttributeError(f"{self.name!r} attribute of {owner.__name__!r} is class-only") + + +path_types = { + "is_dir": "directory", + "is_file": "file", + "is_mount": "mount point", + "is_symlink": "symlink", + "is_block_device": "block device", + "is_char_device": "char device", + "is_fifo": "FIFO", + "is_socket": "socket", +} + + +def path_type(p: "Path") -> str: + """ + Find out what sort of thing a path is. + """ + assert p.exists(), "path does not exist" + for method, name in path_types.items(): + if getattr(p, method)(): + return name + + return "unknown" + + +Obj = TypeVar("Obj") + + +def smart_deepcopy(obj: Obj) -> Obj: + """ + Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects + """ + + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + try: + if not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method + except (TypeError, ValueError, RuntimeError): + # do we really dare to catch ALL errors? 
Seems a bit risky + pass + + return deepcopy(obj) # slowest way when we actually might need a deepcopy + + +def is_valid_field(name: str) -> bool: + if not name.startswith("_"): + return True + return ROOT_KEY == name + + +DUNDER_ATTRIBUTES = { + "__annotations__", + "__classcell__", + "__doc__", + "__module__", + "__orig_bases__", + "__orig_class__", + "__qualname__", +} + + +def is_valid_private_name(name: str) -> bool: + return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES + + +_EMPTY = object() + + +def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: + """ + Check that the items of `left` are the same objects as those in `right`. + + >>> a, b = object(), object() + >>> all_identical([a, b, a], [a, b, a]) + True + >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" + False + """ + for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): + if left_item is not right_item: + return False + return True + + +def assert_never(obj: NoReturn, msg: str) -> NoReturn: + """ + Helper to make sure that we have covered all possible types. + + This is mostly useful for ``mypy``, docs: + https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks + """ + raise TypeError(msg) + + +def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: + """Validate that all aliases are the same and if that's the case return the alias""" + unique_aliases = set(all_aliases) + if len(unique_aliases) > 1: + raise ConfigError( + f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' + ) + return unique_aliases.pop() + + +def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: + """ + Get alias and all valid values in the `Literal` type of the discriminator field + `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
+ """ + is_root_model = getattr(tp, "__custom_root_type__", False) + + if get_origin(tp) is Annotated: + tp = get_args(tp)[0] + + if hasattr(tp, "__pydantic_model__"): + tp = tp.__pydantic_model__ + + if is_union(get_origin(tp)): + alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) + return alias, tuple(v for values in all_values for v in values) + elif is_root_model: + union_type = tp.__fields__[ROOT_KEY].type_ + alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) + + if len(set(all_values)) > 1: + raise ConfigError( + f"Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}" + ) + + return alias, all_values[0] + + else: + try: + t_discriminator_type = tp.__fields__[discriminator_key].type_ + except AttributeError as e: + raise TypeError(f"Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`") from e + except KeyError as e: + raise ConfigError(f"Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}") from e + + if not is_literal_type(t_discriminator_type): + raise ConfigError(f"Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`") + + return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) + + +def _get_union_alias_and_all_values( + union_type: Type[Any], discriminator_key: str +) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: + zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] + # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] + all_aliases, all_values = zip(*zipped_aliases_values) + return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values diff --git a/vendor/pydantic/validators.py b/vendor/pydantic/validators.py new file mode 100644 index 0000000..bf0f1cc --- /dev/null +++ b/vendor/pydantic/validators.py @@ -0,0 +1,765 @@ +import math +import re +from collections import OrderedDict, deque +from collections.abc import Hashable as CollectionsHashable +from datetime import date, datetime, time, timedelta +from decimal import Decimal, DecimalException +from enum import Enum, IntEnum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Hashable, + List, + NamedTuple, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) +from uuid import UUID + +from . 
import errors +from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time +from .typing import ( + AnyCallable, + all_literal_values, + display_as_type, + get_class, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_typeddict, +) +from .utils import almost_equal_floats, lenient_issubclass, sequence_like + +if TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from .config import BaseConfig + from .fields import ModelField + from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt + + ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] + AnyOrderedDict = OrderedDict[Any, Any] + Number = Union[int, float, Decimal] + StrBytes = Union[str, bytes] + + +def str_validator(v: Any) -> Union[str]: + if isinstance(v, str): + if isinstance(v, Enum): + return v.value + else: + return v + elif isinstance(v, (float, int, Decimal)): + # is there anything else we want to add here? If you think so, create an issue. + return str(v) + elif isinstance(v, (bytes, bytearray)): + return v.decode() + else: + raise errors.StrError() + + +def strict_str_validator(v: Any) -> Union[str]: + if isinstance(v, str) and not isinstance(v, Enum): + return v + raise errors.StrError() + + +def bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + elif isinstance(v, str): + return v.encode() + elif isinstance(v, (float, int, Decimal)): + return str(v).encode() + else: + raise errors.BytesError() + + +def strict_bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + else: + raise errors.BytesError() + + +BOOL_FALSE = {0, "0", "off", "f", "false", "n", "no"} +BOOL_TRUE = {1, "1", "on", "t", "true", "y", "yes"} + + +def bool_validator(v: Any) -> bool: + if v is True or v is False: + return v + if isinstance(v, bytes): + v = v.decode() + if isinstance(v, str): + v = v.lower() + try: + if v in BOOL_TRUE: + return True + if v in BOOL_FALSE: + return False + except TypeError: + raise errors.BoolError() + raise errors.BoolError() + + +# matches the default limit cpython, see https://github.com/python/cpython/pull/96500 +max_str_int = 4_300 + + +def int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + + # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 + # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10 + # but better to check here until then. + # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation + # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to + # 1. update to the latest patch release of python once released, 2. 
use a different JSON library like ujson + if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int: + raise errors.IntegerError() + + try: + return int(v) + except (TypeError, ValueError, OverflowError): + raise errors.IntegerError() + + +def strict_int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + raise errors.IntegerError() + + +def float_validator(v: Any) -> float: + if isinstance(v, float): + return v + + try: + return float(v) + except (TypeError, ValueError): + raise errors.FloatError() + + +def strict_float_validator(v: Any) -> float: + if isinstance(v, float): + return v + raise errors.FloatError() + + +def float_finite_validator(v: "Number", field: "ModelField", config: "BaseConfig") -> "Number": + allow_inf_nan = getattr(field.type_, "allow_inf_nan", None) + if allow_inf_nan is None: + allow_inf_nan = config.allow_inf_nan + + if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): + raise errors.NumberNotFiniteError() + return v + + +def number_multiple_validator(v: "Number", field: "ModelField") -> "Number": + field_type: ConstrainedNumber = field.type_ + if field_type.multiple_of is not None: + mod = float(v) / float(field_type.multiple_of) % 1 + if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): + raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) + return v + + +def number_size_validator(v: "Number", field: "ModelField") -> "Number": + field_type: ConstrainedNumber = field.type_ + if field_type.gt is not None and not v > field_type.gt: + raise errors.NumberNotGtError(limit_value=field_type.gt) + elif field_type.ge is not None and not v >= field_type.ge: + raise errors.NumberNotGeError(limit_value=field_type.ge) + + if field_type.lt is not None and not v < field_type.lt: + raise errors.NumberNotLtError(limit_value=field_type.lt) + if field_type.le is not None and not v <= field_type.le: + raise errors.NumberNotLeError(limit_value=field_type.le) + + return v + + +def constant_validator(v: "Any", field: "ModelField") -> "Any": + """Validate ``const`` fields. + + The value provided for a ``const`` field must be equal to the default value + of the field. This is to support the keyword of the same name in JSON + Schema. 
+ """ + if v != field.default: + raise errors.WrongConstantError(given=v, permitted=[field.default]) + + return v + + +def anystr_length_validator(v: "StrBytes", config: "BaseConfig") -> "StrBytes": + v_len = len(v) + + min_length = config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def anystr_strip_whitespace(v: "StrBytes") -> "StrBytes": + return v.strip() + + +def anystr_upper(v: "StrBytes") -> "StrBytes": + return v.upper() + + +def anystr_lower(v: "StrBytes") -> "StrBytes": + return v.lower() + + +def ordered_dict_validator(v: Any) -> "AnyOrderedDict": + if isinstance(v, OrderedDict): + return v + + try: + return OrderedDict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def dict_validator(v: Any) -> Dict[Any, Any]: + if isinstance(v, dict): + return v + + try: + return dict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def list_validator(v: Any) -> List[Any]: + if isinstance(v, list): + return v + elif sequence_like(v): + return list(v) + else: + raise errors.ListError() + + +def tuple_validator(v: Any) -> Tuple[Any, ...]: + if isinstance(v, tuple): + return v + elif sequence_like(v): + return tuple(v) + else: + raise errors.TupleError() + + +def set_validator(v: Any) -> Set[Any]: + if isinstance(v, set): + return v + elif sequence_like(v): + return set(v) + else: + raise errors.SetError() + + +def frozenset_validator(v: Any) -> FrozenSet[Any]: + if isinstance(v, frozenset): + return v + elif sequence_like(v): + return frozenset(v) + else: + raise errors.FrozenSetError() + + +def deque_validator(v: Any) -> Deque[Any]: + if isinstance(v, deque): + return v + elif sequence_like(v): + return deque(v) + else: + raise errors.DequeError() + + +def enum_member_validator(v: Any, field: "ModelField", config: "BaseConfig") -> Enum: + try: + enum_v = field.type_(v) + except ValueError: + # field.type_ should be an enum, so will be iterable + raise errors.EnumMemberError(enum_values=list(field.type_)) + return enum_v.value if config.use_enum_values else enum_v + + +def uuid_validator(v: Any, field: "ModelField") -> UUID: + try: + if isinstance(v, str): + v = UUID(v) + elif isinstance(v, (bytes, bytearray)): + try: + v = UUID(v.decode()) + except ValueError: + # 16 bytes in big-endian order as the bytes argument fail + # the above check + v = UUID(bytes=v) + except ValueError: + raise errors.UUIDError() + + if not isinstance(v, UUID): + raise errors.UUIDError() + + required_version = getattr(field.type_, "_required_version", None) + if required_version and v.version != required_version: + raise errors.UUIDVersionError(required_version=required_version) + + return v + + +def decimal_validator(v: Any) -> Decimal: + if isinstance(v, Decimal): + return v + elif isinstance(v, (bytes, bytearray)): + v = v.decode() + + v = str(v).strip() + + try: + v = Decimal(v) + except DecimalException: + raise errors.DecimalError() + + if not v.is_finite(): + raise errors.DecimalIsNotFiniteError() + + return v + + +def hashable_validator(v: Any) -> Hashable: + if isinstance(v, Hashable): + return v + + raise errors.HashableError() + + +def ip_v4_address_validator(v: Any) -> IPv4Address: + if isinstance(v, IPv4Address): + return v + + try: + return IPv4Address(v) + except ValueError: + raise errors.IPv4AddressError() + + +def 
ip_v6_address_validator(v: Any) -> IPv6Address: + if isinstance(v, IPv6Address): + return v + + try: + return IPv6Address(v) + except ValueError: + raise errors.IPv6AddressError() + + +def ip_v4_network_validator(v: Any) -> IPv4Network: + """ + Assume IPv4Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network + """ + if isinstance(v, IPv4Network): + return v + + try: + return IPv4Network(v) + except ValueError: + raise errors.IPv4NetworkError() + + +def ip_v6_network_validator(v: Any) -> IPv6Network: + """ + Assume IPv6Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network + """ + if isinstance(v, IPv6Network): + return v + + try: + return IPv6Network(v) + except ValueError: + raise errors.IPv6NetworkError() + + +def ip_v4_interface_validator(v: Any) -> IPv4Interface: + if isinstance(v, IPv4Interface): + return v + + try: + return IPv4Interface(v) + except ValueError: + raise errors.IPv4InterfaceError() + + +def ip_v6_interface_validator(v: Any) -> IPv6Interface: + if isinstance(v, IPv6Interface): + return v + + try: + return IPv6Interface(v) + except ValueError: + raise errors.IPv6InterfaceError() + + +def path_validator(v: Any) -> Path: + if isinstance(v, Path): + return v + + try: + return Path(v) + except TypeError: + raise errors.PathError() + + +def path_exists_validator(v: Any) -> Path: + if not v.exists(): + raise errors.PathNotExistsError(path=v) + + return v + + +def callable_validator(v: Any) -> AnyCallable: + """ + Perform a simple check if the value is callable. + + Note: complete matching of argument type hints and return types is not performed + """ + if callable(v): + return v + + raise errors.CallableError(value=v) + + +def enum_validator(v: Any) -> Enum: + if isinstance(v, Enum): + return v + + raise errors.EnumError(value=v) + + +def int_enum_validator(v: Any) -> IntEnum: + if isinstance(v, IntEnum): + return v + + raise errors.IntEnumError(value=v) + + +def make_literal_validator(type_: Any) -> Callable[[Any], Any]: + permitted_choices = all_literal_values(type_) + + # To have a O(1) complexity and still return one of the values set inside the `Literal`, + # we create a dict with the set values (a set causes some problems with the way intersection works). 
+ # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) + allowed_choices = {v: v for v in permitted_choices} + + def literal_validator(v: Any) -> Any: + try: + return allowed_choices[v] + except (KeyError, TypeError): + raise errors.WrongConstantError(given=v, permitted=permitted_choices) + + return literal_validator + + +def constr_length_validator(v: "StrBytes", field: "ModelField", config: "BaseConfig") -> "StrBytes": + v_len = len(v) + + min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def constr_strip_whitespace(v: "StrBytes", field: "ModelField", config: "BaseConfig") -> "StrBytes": + strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace + if strip_whitespace: + v = v.strip() + + return v + + +def constr_upper(v: "StrBytes", field: "ModelField", config: "BaseConfig") -> "StrBytes": + upper = field.type_.to_upper or config.anystr_upper + if upper: + v = v.upper() + + return v + + +def constr_lower(v: "StrBytes", field: "ModelField", config: "BaseConfig") -> "StrBytes": + lower = field.type_.to_lower or config.anystr_lower + if lower: + v = v.lower() + return v + + +def validate_json(v: Any, config: "BaseConfig") -> Any: + if v is None: + # pass None through to other validators + return v + try: + return config.json_loads(v) # type: ignore + except ValueError: + raise errors.JsonError() + except TypeError: + raise errors.JsonTypeError() + + +T = TypeVar("T") + + +def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: + def arbitrary_type_validator(v: Any) -> T: + if isinstance(v, type_): + return v + raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) + + return arbitrary_type_validator + + +def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: + def class_validator(v: Any) -> Type[T]: + if lenient_issubclass(v, type_): + return v + raise errors.SubclassError(expected_class=type_) + + return class_validator + + +def any_class_validator(v: Any) -> Type[T]: + if isinstance(v, type): + return v + raise errors.ClassError() + + +def none_validator(v: Any) -> "Literal[None]": + if v is None: + return v + raise errors.NotNoneError() + + +def pattern_validator(v: Any) -> Pattern[str]: + if isinstance(v, Pattern): + return v + + str_value = str_validator(v) + + try: + return re.compile(str_value) + except re.error: + raise errors.PatternError() + + +NamedTupleT = TypeVar("NamedTupleT", bound=NamedTuple) + + +def make_namedtuple_validator( + namedtuple_cls: Type[NamedTupleT], config: Type["BaseConfig"] +) -> Callable[[Tuple[Any, ...]], NamedTupleT]: + from .annotated_types import create_model_from_namedtuple + + NamedTupleModel = create_model_from_namedtuple( + namedtuple_cls, + __config__=config, + __module__=namedtuple_cls.__module__, + ) + namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] + + def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: + annotations = NamedTupleModel.__annotations__ + + if len(values) > len(annotations): + raise errors.ListMaxLengthError(limit_value=len(annotations)) + + dict_values: Dict[str, Any] = dict(zip(annotations, 
values)) + validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values)) + return namedtuple_cls(**validated_dict_values) + + return namedtuple_validator + + +def make_typeddict_validator( + typeddict_cls: Type["TypedDict"], config: Type["BaseConfig"] # type: ignore[valid-type] +) -> Callable[[Any], Dict[str, Any]]: + from .annotated_types import create_model_from_typeddict + + TypedDictModel = create_model_from_typeddict( + typeddict_cls, + __config__=config, + __module__=typeddict_cls.__module__, + ) + typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined] + + def typeddict_validator(values: "TypedDict") -> Dict[str, Any]: # type: ignore[valid-type] + return TypedDictModel.parse_obj(values).dict(exclude_unset=True) + + return typeddict_validator + + +class IfConfig: + def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None: + self.validator = validator + self.config_attr_names = config_attr_names + self.ignored_value = ignored_value + + def check(self, config: Type["BaseConfig"]) -> bool: + return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names) + + +# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same, +# IPv4Interface before IPv4Address, etc +_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [ + (IntEnum, [int_validator, enum_member_validator]), + (Enum, [enum_member_validator]), + ( + str, + [ + str_validator, + IfConfig(anystr_strip_whitespace, "anystr_strip_whitespace"), + IfConfig(anystr_upper, "anystr_upper"), + IfConfig(anystr_lower, "anystr_lower"), + IfConfig(anystr_length_validator, "min_anystr_length", "max_anystr_length"), + ], + ), + ( + bytes, + [ + bytes_validator, + IfConfig(anystr_strip_whitespace, "anystr_strip_whitespace"), + IfConfig(anystr_upper, "anystr_upper"), + IfConfig(anystr_lower, "anystr_lower"), + IfConfig(anystr_length_validator, "min_anystr_length", "max_anystr_length"), + ], + ), + (bool, [bool_validator]), + (int, [int_validator]), + (float, [float_validator, IfConfig(float_finite_validator, "allow_inf_nan", ignored_value=True)]), + (Path, [path_validator]), + (datetime, [parse_datetime]), + (date, [parse_date]), + (time, [parse_time]), + (timedelta, [parse_duration]), + (OrderedDict, [ordered_dict_validator]), + (dict, [dict_validator]), + (list, [list_validator]), + (tuple, [tuple_validator]), + (set, [set_validator]), + (frozenset, [frozenset_validator]), + (deque, [deque_validator]), + (UUID, [uuid_validator]), + (Decimal, [decimal_validator]), + (IPv4Interface, [ip_v4_interface_validator]), + (IPv6Interface, [ip_v6_interface_validator]), + (IPv4Address, [ip_v4_address_validator]), + (IPv6Address, [ip_v6_address_validator]), + (IPv4Network, [ip_v4_network_validator]), + (IPv6Network, [ip_v6_network_validator]), +] + + +def find_validators( # noqa: C901 (ignore complexity) + type_: Type[Any], config: Type["BaseConfig"] +) -> Generator[AnyCallable, None, None]: + from .dataclasses import is_builtin_dataclass, make_dataclass_validator + + if type_ is Any or type_ is object: + return + type_type = type_.__class__ + if type_type == ForwardRef or type_type == TypeVar: + return + + if is_none_type(type_): + yield none_validator + return + if type_ is Pattern or type_ is re.Pattern: + yield pattern_validator + return + if type_ is Hashable or type_ is CollectionsHashable: + yield hashable_validator + return + if is_callable_type(type_): + yield callable_validator + 
return + if is_literal_type(type_): + yield make_literal_validator(type_) + return + if is_builtin_dataclass(type_): + yield from make_dataclass_validator(type_, config) + return + if type_ is Enum: + yield enum_validator + return + if type_ is IntEnum: + yield int_enum_validator + return + if is_namedtuple(type_): + yield tuple_validator + yield make_namedtuple_validator(type_, config) + return + if is_typeddict(type_): + yield make_typeddict_validator(type_, config) + return + + class_ = get_class(type_) + if class_ is not None: + if class_ is not Any and isinstance(class_, type): + yield make_class_validator(class_) + else: + yield any_class_validator + return + + for val_type, validators in _VALIDATORS: + try: + if issubclass(type_, val_type): + for v in validators: + if isinstance(v, IfConfig): + if v.check(config): + yield v.validator + else: + yield v + return + except TypeError: + raise RuntimeError(f"error checking inheritance of {type_!r} (type: {display_as_type(type_)})") + + if config.arbitrary_types_allowed: + yield make_arbitrary_type_validator(type_) + else: + raise RuntimeError(f"no validator found for {type_}, see `arbitrary_types_allowed` in Config") diff --git a/vendor/pydantic/version.py b/vendor/pydantic/version.py new file mode 100644 index 0000000..8bea34e --- /dev/null +++ b/vendor/pydantic/version.py @@ -0,0 +1,38 @@ +__all__ = "compiled", "VERSION", "version_info" + +VERSION = "1.10.14" + +try: + import cython # type: ignore +except ImportError: + compiled: bool = False +else: # pragma: no cover + try: + compiled = cython.compiled + except AttributeError: + compiled = False + + +def version_info() -> str: + import platform + import sys + from importlib import import_module + from pathlib import Path + + optional_deps = [] + for p in ("devtools", "dotenv", "email-validator", "typing-extensions"): + try: + import_module(p.replace("-", "_")) + except ImportError: + continue + optional_deps.append(p) + + info = { + "pydantic version": VERSION, + "pydantic compiled": compiled, + "install path": Path(__file__).resolve().parent, + "python version": sys.version, + "platform": platform.platform(), + "optional deps. installed": optional_deps, + } + return "\n".join("{:>30} {}".format(k + ":", str(v).replace("\n", " ")) for k, v in info.items())
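
Two short, hedged sketches for reviewers; neither is part of the patch itself, and both assume the repository root (the directory containing `vendor/`, with the `vendor/__init__.py` and `vendor/pydantic/__init__.py` files created above) is on `sys.path`. First, the lookup order that the `_VALIDATORS` table comment describes ("bool is a subclass of int so has to come first") can be observed directly through `find_validators`:

    # Sketch: inspect the validator chain the vendored pydantic v1 picks for a type.
    # find_validators scans _VALIDATORS in declaration order, so bool resolves to
    # bool_validator rather than int_validator (bool is a subclass of int).
    from vendor.pydantic.config import BaseConfig
    from vendor.pydantic.validators import find_validators

    for validator in find_validators(bool, BaseConfig):
        print(validator.__name__)  # -> bool_validator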
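
Second, a minimal smoke test that the vendored copy imports and reports the pinned release, using only names defined in `vendor/pydantic/version.py` above:

    # Sketch: confirm the vendored pydantic v1 is importable and pinned.
    from vendor.pydantic.version import VERSION, version_info

    assert VERSION == "1.10.14"  # the release pinned by this patch
    print(version_info())        # version, compiled flag, install path, platform, optional deps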