diff --git a/.asf.yaml b/.asf.yaml index ce0d702834bfa..8134fddbcd91a 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -122,14 +122,18 @@ github: # Max 10 collaborators allowed # https://github.com/apache/infrastructure-asfyaml/blob/main/README.md#assigning-the-github-triage-role-to-external-collaborators - aritra24 - - dirrao + - omkar-foss - rawwar - nathadfield - sunank200 - vatsrahul1001 - cmarteepants - - gopidesupavan - bugraoz93 + - briana-okyere notifications: jobs: jobs@airflow.apache.org + commits: commits@airflow.apache.org + issues: commits@airflow.apache.org + pullrequests: commits@airflow.apache.org + discussions: commits@airflow.apache.org diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index c20fe916f92d4..4d52cba5cdd16 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -11,7 +11,7 @@ # Kubernetes /airflow/kubernetes/ @dstandish @jedcunningham -/airflow/providers/cncf/kubernetes/executors/ @dstandish @jedcunningham +/providers/src/airflow/providers/cncf/kubernetes/executors/ @dstandish @jedcunningham # Helm Chart /chart/ @dstandish @jedcunningham @hussein-awala @@ -61,27 +61,28 @@ /airflow/secrets @dstandish @potiuk @ashb # Providers -/airflow/providers/amazon/ @eladkal @o-nikolas -/airflow/providers/celery/ @hussein-awala -/airflow/providers/cncf/kubernetes @jedcunningham @hussein-awala -/airflow/providers/common/sql/ @eladkal -/airflow/providers/dbt/cloud/ @josh-fell -/airflow/providers/edge @jscheffl -/airflow/providers/hashicorp/ @hussein-awala -/airflow/providers/openlineage/ @mobuchowski -/airflow/providers/slack/ @eladkal -/airflow/providers/smtp/ @hussein-awala -/airflow/providers/snowflake/ @potiuk @mik-laj -/airflow/providers/tabular/ @Fokko +/providers/src/airflow/providers/amazon/ @eladkal @o-nikolas +/providers/src/airflow/providers/celery/ @hussein-awala +/providers/src/airflow/providers/cncf/kubernetes @jedcunningham @hussein-awala +/providers/src/airflow/providers/common/sql/ @eladkal +/providers/src/airflow/providers/dbt/cloud/ @josh-fell +/providers/src/airflow/providers/edge @jscheffl +/providers/src/airflow/providers/hashicorp/ @hussein-awala +/providers/src/airflow/providers/openlineage/ @mobuchowski +/providers/src/airflow/providers/slack/ @eladkal +/providers/src/airflow/providers/smtp/ @hussein-awala +/providers/src/airflow/providers/snowflake/ @potiuk @mik-laj +/providers/src/airflow/providers/apache/iceberg/ @Fokko +/providers/tests/apache/iceberg/ @Fokko /docs/apache-airflow-providers-amazon/ @eladkal @o-nikolas /docs/apache-airflow-providers-cncf-kubernetes @jedcunningham /docs/apache-airflow-providers-common-sql/ @eladkal /docs/apache-airflow-providers-openlineage/ @mobuchowski /docs/apache-airflow-providers-slack/ @eladkal -/tests/providers/amazon/ @eladkal @o-nikolas -/tests/providers/common/sql/ @eladkal -/tests/providers/openlineage/ @mobuchowski -/tests/providers/slack/ @eladkal +/providers/tests/amazon/ @eladkal @o-nikolas +/providers/tests/common/sql/ @eladkal +/providers/tests/openlineage/ @mobuchowski +/providers/tests/slack/ @eladkal /tests/system/providers/amazon/ @eladkal @o-nikolas # Dev tools @@ -108,6 +109,6 @@ ISSUE_TRIAGE_PROCESS.rst @eladkal # AIP-58 - Object Storage /airflow/io/ @bolkedebruin -/airflow/providers/**/fs/ @bolkedebruin -/airflow/providers/common/io/ @bolkedebruin +/providers/src/airflow/providers/**/fs/ @bolkedebruin +/providers/src/airflow/providers/common/io/ @bolkedebruin /docs/apache-airflow/core-concepts/objectstorage.rst @bolkedebruin diff --git a/.github/SECURITY.md b/.github/SECURITY.md index 
4372b4528b477..4bcbd30dcad19 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -99,10 +99,11 @@ do not apply to Airflow, or have a different severity than some generic scoring The Airflow Security Team will get back to you after assessing the report. You will usually get confirmation that the issue is being worked (or that we quickly assessed it as invalid) within several -business days. Note that this is an Open-Source projects and members of the security team are volunteers -so please make sure to be patient. If you do not get a response within a week or so, please send a -kind reminder to the security team. We will usually let you know the CVE number that will be assigned -to the issue and the severity of the issue as well as release the issue is scheduled to be fixed +business days. Note that this is an Open-Source project and members of the security team are volunteers, +so please make sure to be patient. If you do not get a response within a week, please send a kind reminder +to the security team about a lack of response; however, reminders should only be for the initial response +and not for updates on the assessment or remediation. We will usually let you know the CVE number that will +be assigned to the issue and the severity of the issue as well as the release in which the issue is scheduled to be fixed after we assess the issue (which might take longer or shorter time depending on the issue complexity and potential impact, severity, whether we want to address a whole class issues in a single fix and a number of other factors). You should subscribe and monitor the `users@airflow.apache.org` mailing diff --git a/.github/actions/install-pre-commit/action.yml b/.github/actions/install-pre-commit/action.yml new file mode 100644 index 0000000000000..02eea2c722917 --- /dev/null +++ b/.github/actions/install-pre-commit/action.yml @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+# +--- +name: 'Install pre-commit' +description: 'Installs pre-commit and related packages' +inputs: + python-version: + description: 'Python version to use' + default: 3.9 + uv-version: + description: 'uv version to use' + default: 0.4.29 + pre-commit-version: + description: 'pre-commit version to use' + default: 4.0.1 + pre-commit-uv-version: + description: 'pre-commit-uv version to use' + default: 4.1.4 +runs: + using: "composite" + steps: + - name: Install pre-commit, uv, and pre-commit-uv + shell: bash + run: > + pip install + pre-commit==${{inputs.pre-commit-version}} + uv==${{inputs.uv-version}} + pre-commit-uv==${{inputs.pre-commit-uv-version}} + - name: Cache pre-commit envs + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: "pre-commit-${{inputs.python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" + restore-keys: | + pre-commit-${{inputs.python-version}}- diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index e790d65e2fe6f..0a3fc240fc8d9 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -626,7 +626,7 @@ labelPRBasedOnFilePath: area:Scheduler: - airflow/jobs/**/* - - airflow/task/task_runner/**/* + - airflow/task/standard_task_runner.py - airflow/dag_processing/**/* - docs/apache-airflow/administration-and-deployment/scheduler.rst - tests/jobs/**/* diff --git a/.github/workflows/additional-ci-image-checks.yml b/.github/workflows/additional-ci-image-checks.yml index 878800324b784..8a3b46e70d37d 100644 --- a/.github/workflows/additional-ci-image-checks.yml +++ b/.github/workflows/additional-ci-image-checks.yml @@ -84,6 +84,10 @@ on: # yamllint disable-line rule:truthy description: "Whether to debug resources (true/false)" required: true type: string + use-uv: + description: "Whether to use uv to build the image (true/false)" + required: true + type: string jobs: # Push early BuildX cache to GitHub Registry in Apache repository, This cache does not wait for all the # tests to complete - it is run very early in the build process for "main" merges in order to refresh @@ -113,7 +117,7 @@ jobs: python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} - use-uv: "true" + use-uv: ${{ inputs.use-uv}} include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} @@ -170,7 +174,7 @@ jobs: # platform: "linux/arm64" # branch: ${{ inputs.branch }} # constraints-branch: ${{ inputs.constraints-branch }} -# use-uv: "true" +# use-uv: ${{ inputs.use-uv}} # upgrade-to-newer-dependencies: ${{ inputs.upgrade-to-newer-dependencies }} # docker-cache: ${{ inputs.docker-cache }} # disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml index 509d7066f6d38..bf7e8ab7bf79b 100644 --- a/.github/workflows/basic-tests.yml +++ b/.github/workflows/basic-tests.yml @@ -201,11 +201,13 @@ jobs: --package-format wheel --skip-tag-check - name: "Install Airflow with fab for webserver tests" run: pip install . dist/apache_airflow_providers_fab-*.whl - - name: "Prepare Standard provider packages: wheel" - run: > - breeze release-management prepare-provider-packages standard --package-format wheel --skip-tag-check - name: "Install Airflow with standard provider for webserver tests" run: pip install . 
dist/apache_airflow_providers_standard-*.whl + - name: "Prepare Task SDK package: wheel" + run: > + breeze release-management prepare-task-sdk-package --package-format wheel + - name: "Install Task SDK package" + run: pip install ./dist/apache_airflow_task_sdk-*.whl - name: "Install Python client" run: pip install ./dist/apache_airflow_client-*.whl - name: "Initialize Airflow DB and start webserver" @@ -283,16 +285,11 @@ jobs: - name: "Install Breeze" uses: ./.github/actions/breeze id: breeze - - name: Cache pre-commit envs - uses: actions/cache@v4 + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit with: - path: ~/.cache/pre-commit - # yamllint disable-line rule:line-length - key: "pre-commit-${{steps.breeze.outputs.host-python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: "\ - pre-commit-${{steps.breeze.outputs.host-python-version}}-\ - ${{ hashFiles('.pre-commit-config.yaml') }}\n - pre-commit-${{steps.breeze.outputs.host-python-version}}-" + python-version: ${{steps.breeze.outputs.host-python-version}} - name: Fetch incoming commit ${{ github.sha }} with its parent uses: actions/checkout@v4 with: @@ -359,6 +356,8 @@ jobs: --hook-stage manual update-build-dependencies if: always() + env: + SKIP_TROVE_CLASSIFIERS_ONLY: "true" - name: "Run automated upgrade for chart dependencies" run: > pre-commit run diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 943b01f8f8916..6b1160d6f17db 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -72,6 +72,7 @@ jobs: docker-cache: ${{ steps.selective-checks.outputs.docker-cache }} default-branch: ${{ steps.selective-checks.outputs.default-branch }} disable-airflow-repo-cache: ${{ steps.selective-checks.outputs.disable-airflow-repo-cache }} + force-pip: ${{ steps.selective-checks.outputs.force-pip }} constraints-branch: ${{ steps.selective-checks.outputs.default-constraints-branch }} runs-on-as-json-default: ${{ steps.selective-checks.outputs.runs-on-as-json-default }} runs-on-as-json-public: ${{ steps.selective-checks.outputs.runs-on-as-json-public }} @@ -203,7 +204,7 @@ jobs: pull-request-target: "true" is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} push-image: "true" - use-uv: "true" + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} image-tag: ${{ needs.build-info.outputs.image-tag }} platform: "linux/amd64" python-versions: ${{ needs.build-info.outputs.python-versions }} @@ -248,7 +249,7 @@ jobs: pull-request-target: "true" is-committer-build: ${{ needs.build-info.outputs.is-committer-build }} push-image: "true" - use-uv: "true" + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} image-tag: ${{ needs.build-info.outputs.image-tag }} platform: linux/amd64 python-versions: ${{ needs.build-info.outputs.python-versions }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8a9d716cd8421..5a0f2ca6106f8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -77,6 +77,7 @@ jobs: default-mysql-version: ${{ steps.selective-checks.outputs.default-mysql-version }} default-helm-version: ${{ steps.selective-checks.outputs.default-helm-version }} default-kind-version: ${{ steps.selective-checks.outputs.default-kind-version }} + force-pip: ${{ steps.selective-checks.outputs.force-pip }} full-tests-needed: ${{ steps.selective-checks.outputs.full-tests-needed }} parallel-test-types-list-as-string: >- 
${{ steps.selective-checks.outputs.parallel-test-types-list-as-string }} @@ -99,7 +100,7 @@ jobs: ci-image-build: ${{ steps.selective-checks.outputs.ci-image-build }} prod-image-build: ${{ steps.selective-checks.outputs.prod-image-build }} docs-build: ${{ steps.selective-checks.outputs.docs-build }} - mypy-folders: ${{ steps.selective-checks.outputs.mypy-folders }} + mypy-checks: ${{ steps.selective-checks.outputs.mypy-checks }} needs-mypy: ${{ steps.selective-checks.outputs.needs-mypy }} needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }} needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }} @@ -205,7 +206,7 @@ jobs: platform: "linux/amd64" python-versions: ${{ needs.build-info.outputs.python-versions }} branch: ${{ needs.build-info.outputs.default-branch }} - use-uv: "true" + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} constraints-branch: ${{ needs.build-info.outputs.default-constraints-branch }} docker-cache: ${{ needs.build-info.outputs.docker-cache }} @@ -271,6 +272,7 @@ jobs: latest-versions-only: ${{ needs.build-info.outputs.latest-versions-only }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} generate-constraints: @@ -298,7 +300,7 @@ jobs: runs-on-as-json-docs-build: ${{ needs.build-info.outputs.runs-on-as-json-docs-build }} image-tag: ${{ needs.build-info.outputs.image-tag }} needs-mypy: ${{ needs.build-info.outputs.needs-mypy }} - mypy-folders: ${{ needs.build-info.outputs.mypy-folders }} + mypy-checks: ${{ needs.build-info.outputs.mypy-checks }} python-versions-list-as-string: ${{ needs.build-info.outputs.python-versions-list-as-string }} branch: ${{ needs.build-info.outputs.default-branch }} canary-run: ${{ needs.build-info.outputs.canary-run }} @@ -312,6 +314,7 @@ jobs: ci-image-build: ${{ needs.build-info.outputs.ci-image-build }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} + docs-build: ${{ needs.build-info.outputs.docs-build }} providers: name: "Provider checks" @@ -556,7 +559,7 @@ jobs: default-python-version: ${{ needs.build-info.outputs.default-python-version }} branch: ${{ needs.build-info.outputs.default-branch }} push-image: "true" - use-uv: "true" + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }} upgrade-to-newer-dependencies: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }} chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }} @@ -639,11 +642,30 @@ jobs: kubernetes-versions-list-as-string: ${{ needs.build-info.outputs.kubernetes-versions-list-as-string }} kubernetes-combos-list-as-string: ${{ needs.build-info.outputs.kubernetes-combos-list-as-string }} include-success-outputs: ${{ needs.build-info.outputs.include-success-outputs }} + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} debug-resources: ${{ needs.build-info.outputs.debug-resources }} if: > ( needs.build-info.outputs.run-kubernetes-tests == 'true' || needs.build-info.outputs.needs-helm-tests == 'true') + tests-task-sdk: + name: "Task SDK tests" + uses: 
./.github/workflows/task-sdk-tests.yml + needs: [build-info, wait-for-ci-images] + permissions: + contents: read + packages: read + secrets: inherit + with: + runs-on-as-json-default: ${{ needs.build-info.outputs.runs-on-as-json-default }} + image-tag: ${{ needs.build-info.outputs.image-tag }} + default-python-version: ${{ needs.build-info.outputs.default-python-version }} + python-versions: ${{ needs.build-info.outputs.python-versions }} + run-task-sdk-tests: ${{ needs.build-info.outputs.run-task-sdk-tests }} + if: > + ( needs.build-info.outputs.run-task-sdk-tests == 'true' || + needs.build-info.outputs.run-tests == 'true') + finalize-tests: name: Finalize tests permissions: diff --git a/.github/workflows/finalize-tests.yml b/.github/workflows/finalize-tests.yml index 6fae105e0a646..6f9bc74168b42 100644 --- a/.github/workflows/finalize-tests.yml +++ b/.github/workflows/finalize-tests.yml @@ -149,7 +149,7 @@ jobs: python-versions: ${{ inputs.python-versions }} branch: ${{ inputs.branch }} constraints-branch: ${{ inputs.constraints-branch }} - use-uv: "true" + use-uv: ${{ needs.build-info.outputs.force-pip == 'true' && 'false' || 'true' }} include-success-outputs: ${{ inputs.include-success-outputs }} docker-cache: ${{ inputs.docker-cache }} disable-airflow-repo-cache: ${{ inputs.disable-airflow-repo-cache }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e831350f5b186..530d0f9fc5636 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -59,6 +59,7 @@ on: # yamllint disable-line rule:truthy jobs: tests-integration: timeout-minutes: 130 + if: inputs.testable-integrations != '[]' name: "Integration Tests: ${{ matrix.integration }}" runs-on: ${{ fromJSON(inputs.runs-on-as-json-public) }} strategy: diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index c4b72a9afc924..3b3e067038db9 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -44,6 +44,10 @@ on: # yamllint disable-line rule:truthy description: "Whether to include success outputs" required: true type: string + use-uv: + description: "Whether to use uv" + required: true + type: string debug-resources: description: "Whether to debug resources" required: true @@ -96,6 +100,9 @@ jobs: key: "\ k8s-env-${{ steps.breeze.outputs.host-python-version }}-\ ${{ hashFiles('scripts/ci/kubernetes/k8s_requirements.txt','hatch_build.py') }}" + - name: "Switch breeze to use uv" + run: breeze setup config --use-uv + if: inputs.use-uv == 'true' - name: Run complete K8S tests ${{ inputs.kubernetes-combos-list-as-string }} run: breeze k8s run-complete-tests --run-in-parallel --upgrade --no-copy-local-sources env: diff --git a/.github/workflows/prod-image-build.yml b/.github/workflows/prod-image-build.yml index db80a6ec247ec..df4f24981ff30 100644 --- a/.github/workflows/prod-image-build.yml +++ b/.github/workflows/prod-image-build.yml @@ -181,6 +181,11 @@ jobs: run: > breeze release-management prepare-airflow-package --package-format wheel if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' + - name: "Prepare task-sdk package" + shell: bash + run: > + breeze release-management prepare-task-sdk-package --package-format wheel + if: inputs.do-build == 'true' && inputs.upload-package-artifact == 'true' - name: "Upload prepared packages as artifacts" uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/release_dockerhub_image.yml 
b/.github/workflows/release_dockerhub_image.yml index 100e850a6fd84..5ce1585131f76 100644 --- a/.github/workflows/release_dockerhub_image.yml +++ b/.github/workflows/release_dockerhub_image.yml @@ -85,6 +85,7 @@ jobs: "kaxil", "pierrejeambrun", "potiuk", + "utkarsharma2" ]'), github.event.sender.login) steps: - name: "Cleanup repo" diff --git a/.github/workflows/static-checks-mypy-docs.yml b/.github/workflows/static-checks-mypy-docs.yml index c875c1667cf9e..be2c4f8e28645 100644 --- a/.github/workflows/static-checks-mypy-docs.yml +++ b/.github/workflows/static-checks-mypy-docs.yml @@ -36,7 +36,7 @@ on: # yamllint disable-line rule:truthy description: "Whether to run mypy checks (true/false)" required: true type: string - mypy-folders: + mypy-checks: description: "List of folders to run mypy checks on" required: false type: string @@ -92,6 +92,10 @@ on: # yamllint disable-line rule:truthy description: "Whether to debug resources (true/false)" required: true type: string + docs-build: + description: "Whether to build docs (true/false)" + required: true + type: string jobs: static-checks: timeout-minutes: 45 @@ -122,14 +126,11 @@ jobs: - name: "Prepare breeze & CI image: ${{ inputs.default-python-version}}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image id: breeze - - name: Cache pre-commit envs - uses: actions/cache@v4 + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit with: - path: ~/.cache/pre-commit - # yamllint disable-line rule:line-length - key: "pre-commit-${{steps.breeze.outputs.host-python-version}}-${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: | - pre-commit-${{steps.breeze.outputs.host-python-version}}- + python-version: ${{steps.breeze.outputs.host-python-version}} - name: "Static checks" run: breeze static-checks --all-files --show-diff-on-failure --color always --initialize-environment env: @@ -148,7 +149,7 @@ jobs: strategy: fail-fast: false matrix: - mypy-folder: ${{ fromJSON(inputs.mypy-folders) }} + mypy-check: ${{ fromJSON(inputs.mypy-checks) }} env: PYTHON_MAJOR_MINOR_VERSION: "${{inputs.default-python-version}}" IMAGE_TAG: "${{ inputs.image-tag }}" @@ -166,10 +167,13 @@ jobs: - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image id: breeze - - name: "MyPy checks for ${{ matrix.mypy-folder }}" - run: | - pip install pre-commit - pre-commit run --color always --verbose --hook-stage manual mypy-${{matrix.mypy-folder}} --all-files + - name: "Install pre-commit" + uses: ./.github/actions/install-pre-commit + id: pre-commit + with: + python-version: ${{steps.breeze.outputs.host-python-version}} + - name: "MyPy checks for ${{ matrix.mypy-check }}" + run: pre-commit run --color always --verbose --hook-stage manual ${{matrix.mypy-check}} --all-files env: VERBOSE: "false" COLUMNS: "250" @@ -182,6 +186,7 @@ jobs: timeout-minutes: 150 name: "Build documentation" runs-on: ${{ fromJSON(inputs.runs-on-as-json-default) }} + if: inputs.docs-build == 'true' strategy: fail-fast: false matrix: @@ -231,8 +236,6 @@ jobs: timeout-minutes: 150 name: "Publish documentation" needs: build-docs - # For canary runs we need to push documentation to AWS S3 and preparing it takes a lot of space - # So we should use self-hosted ASF runners for this runs-on: ${{ fromJSON(inputs.runs-on-as-json-docs-build) }} env: GITHUB_REPOSITORY: ${{ github.repository }} @@ -243,9 +246,7 @@ jobs: INCLUDE_SUCCESS_OUTPUTS: "${{ 
inputs.include-success-outputs }}" PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" - # Temporary disabled it until self-hosted ASF runners will be working again - if: false - # if: inputs.canary-run == 'true' && inputs.branch == 'main' + if: inputs.canary-run == 'true' && inputs.branch == 'main' steps: - name: "Cleanup repo" shell: bash @@ -261,16 +262,22 @@ jobs: with: name: airflow-docs path: './docs/_build' + - name: Check disk space available + run: df -h + - name: Create /mnt/airflow-site directory + run: sudo mkdir -p /mnt/airflow-site && sudo chown -R "${USER}" /mnt/airflow-site - name: "Clone airflow-site" run: > - git clone https://github.com/apache/airflow-site.git ${GITHUB_WORKSPACE}/airflow-site && - echo "AIRFLOW_SITE_DIRECTORY=${GITHUB_WORKSPACE}/airflow-site" >> "$GITHUB_ENV" + git clone https://github.com/apache/airflow-site.git /mnt/airflow-site/airflow-site && + echo "AIRFLOW_SITE_DIRECTORY=/mnt/airflow-site/airflow-site" >> "$GITHUB_ENV" - name: "Prepare breeze & CI image: ${{ inputs.default-python-version }}:${{ inputs.image-tag }}" uses: ./.github/actions/prepare_breeze_and_image - name: "Publish docs" run: > breeze release-management publish-docs --override-versioned --run-in-parallel ${{ inputs.docs-list-as-string }} + - name: Check disk space available + run: df -h - name: "Generate back references for providers" run: breeze release-management add-back-references all-providers - name: "Generate back references for apache-airflow" diff --git a/.github/workflows/task-sdk-tests.yml b/.github/workflows/task-sdk-tests.yml index 14fae903837c2..d1d152648cb8b 100644 --- a/.github/workflows/task-sdk-tests.yml +++ b/.github/workflows/task-sdk-tests.yml @@ -28,10 +28,6 @@ on: # yamllint disable-line rule:truthy description: "Tag to set for the image" required: true type: string - canary-run: - description: "Whether this is a canary run" - required: true - type: string default-python-version: description: "Which version of python should be used by default" required: true @@ -40,6 +36,10 @@ on: # yamllint disable-line rule:truthy description: "JSON-formatted array of Python versions to build images from" required: true type: string + run-task-sdk-tests: + description: "Whether to run Task SDK tests or not (true/false)" + required: true + type: string jobs: task-sdk-tests: timeout-minutes: 80 @@ -58,7 +58,6 @@ jobs: PYTHON_MAJOR_MINOR_VERSION: "${{ inputs.default-python-version }}" VERBOSE: "true" CLEAN_AIRFLOW_INSTALLATION: "${{ inputs.canary-run }}" - if: inputs.run-task-sdk-tests == 'true' steps: - name: "Cleanup repo" shell: bash @@ -81,11 +80,6 @@ jobs: pipx uninstall twine || true pipx install twine && twine check dist/*.whl - name: > - Run provider unit tests on - Airflow Task SDK:Python ${{ matrix.python-version }} - if: matrix.run-tests == 'true' + Run unit tests for Airflow Task SDK:Python ${{ matrix.python-version }} run: > - breeze testing tests --run-in-parallel - --parallel-test-types TaskSDK - --use-packages-from-dist - --package-format wheel + breeze testing task-sdk-tests --python "${{ matrix.python-version }}" diff --git a/.gitignore b/.gitignore index a9c055041d980..84afbd474102b 100644 --- a/.gitignore +++ b/.gitignore @@ -254,3 +254,6 @@ licenses/LICENSES-ui.txt # airflow-build-dockerfile and correconding ignore file airflow-build-dockerfile* + +# Temporary ignore uv.lock until we integrate it fully in our constraint preparation mechanism +/uv.lock diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 
29762c19136c2..20c7a293bff06 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -168,12 +168,18 @@ repos: \.cfg$|\.conf$|\.ini$|\.ldif$|\.properties$|\.readthedocs$|\.service$|\.tf$|Dockerfile.*$ - repo: local hooks: + - id: check-min-python-version + name: Check minimum Python version + entry: ./scripts/ci/pre_commit/check_min_python_version.py + language: python + additional_dependencies: ['rich>=12.4.4'] + require_serial: true - id: update-common-sql-api-stubs name: Check and update common.sql API stubs entry: ./scripts/ci/pre_commit/update_common_sql_api_stubs.py language: python files: ^scripts/ci/pre_commit/update_common_sql_api\.py|^providers/src/airflow/providers/common/sql/.*\.pyi?$ - additional_dependencies: ['rich>=12.4.4', 'mypy==1.9.0', 'black==23.10.0', 'jinja2'] + additional_dependencies: ['rich>=12.4.4', 'mypy==1.9.0', 'black==24.10.0', 'jinja2'] pass_filenames: false require_serial: true - id: update-black-version @@ -193,6 +199,7 @@ repos: files: ^.pre-commit-config.yaml$|^scripts/ci/pre_commit/update_build_dependencies.py$ pass_filenames: false require_serial: true + additional_dependencies: ['rich>=12.4.4'] - id: update-installers name: Update installers to latest (manual) entry: ./scripts/ci/pre_commit/update_installers.py @@ -227,18 +234,18 @@ repos: additional_dependencies: ["libcst>=1.1.0"] files: ^(providers/src/)?airflow/.*/(sensors|operators)/.*\.py$ - repo: https://github.com/asottile/blacken-docs - rev: 1.19.0 + rev: 1.19.1 hooks: - id: blacken-docs name: Run black on docs args: - --line-length=110 - - --target-version=py37 - - --target-version=py38 - --target-version=py39 - --target-version=py310 + - --target-version=py311 + - --target-version=py312 alias: blacken-docs - additional_dependencies: [black==23.10.0] + additional_dependencies: [black==24.10.0] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: @@ -353,7 +360,7 @@ repos: types_or: [python, pyi] args: [--fix] require_serial: true - additional_dependencies: ["ruff==0.5.5"] + additional_dependencies: ["ruff==0.7.1"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^performance/tests/test_.*.py - id: ruff-format name: Run 'ruff format' @@ -363,7 +370,7 @@ repos: types_or: [python, pyi] args: [] require_serial: true - additional_dependencies: ["ruff==0.5.5"] + additional_dependencies: ["ruff==0.7.1"] exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py$ - id: replace-bad-characters name: Replace bad characters @@ -651,6 +658,8 @@ repos: ^docs/exts/removemarktransform.py$| ^newsfragments/41761.significant.rst$| ^scripts/ci/pre_commit/vendor_k8s_json_schema.py$| + ^scripts/ci/docker-compose/integration-keycloak.yml$| + ^scripts/ci/docker-compose/keycloak/keycloak-entrypoint.sh$| ^tests/| ^providers/tests/| ^.pre-commit-config\.yaml$| @@ -715,7 +724,7 @@ repos: files: > (?x) ^providers/src/airflow/providers/.*\.py$ - exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py + exclude: ^.*/.*_vendor/|providers/src/airflow/providers/standard/operators/bash.py|providers/src/airflow/providers/standard/operators/python.py - id: check-get-lineage-collector-providers language: python name: Check providers import hook lineage code from compat @@ -897,7 +906,7 @@ repos: - --max-length=53 language: python files: ^\.pre-commit-config\.yaml$|^scripts/ci/pre_commit/check_pre_commit_hooks\.py$ - additional_dependencies: ['pyyaml', 'jinja2', 'black==23.10.0', 'tabulate', 'rich>=12.4.4'] + additional_dependencies: ['pyyaml', 'jinja2', 'black==24.10.0', 
'tabulate', 'rich>=12.4.4'] require_serial: true pass_filenames: false - id: check-integrations-list-consistent @@ -905,7 +914,7 @@ repos: entry: ./scripts/ci/pre_commit/check_integrations_list.py language: python files: ^scripts/ci/docker-compose/integration-.*\.yml$|^contributing-docs/testing/integration_tests.rst$ - additional_dependencies: ['black==23.10.0', 'tabulate', 'rich>=12.4.4', 'pyyaml'] + additional_dependencies: ['black==24.10.0', 'tabulate', 'rich>=12.4.4', 'pyyaml'] require_serial: true pass_filenames: false - id: update-breeze-readme-config-hash @@ -973,7 +982,7 @@ repos: files: .*\.schema\.json$ exclude: ^.*/.*_vendor/ require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: lint-json-schema name: Lint NodePort Service entry: ./scripts/ci/pre_commit/json_schema.py @@ -984,7 +993,7 @@ repos: pass_filenames: true files: ^scripts/ci/kubernetes/nodeport\.yaml$ require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: lint-json-schema name: Lint Docker compose files entry: ./scripts/ci/pre_commit/json_schema.py @@ -999,7 +1008,7 @@ repos: ^scripts/ci/docker-compose/grafana/.| ^scripts/ci/docker-compose/.+-config\.ya?ml require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: lint-json-schema name: Lint chart/values.schema.json entry: ./scripts/ci/pre_commit/json_schema.py @@ -1011,13 +1020,13 @@ repos: pass_filenames: false files: ^chart/values\.schema\.json$|^chart/values_schema\.schema\.json$ require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: update-vendored-in-k8s-json-schema name: Vendor k8s definitions into values.schema.json entry: ./scripts/ci/pre_commit/vendor_k8s_json_schema.py language: python files: ^chart/values\.schema\.json$ - additional_dependencies: ['requests==2.25.0'] + additional_dependencies: ['requests==2.32.3'] - id: lint-json-schema name: Lint chart/values.yaml entry: ./scripts/ci/pre_commit/json_schema.py @@ -1030,7 +1039,7 @@ repos: pass_filenames: false files: ^chart/values\.yaml$|^chart/values\.schema\.json$ require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: lint-json-schema name: Lint config_templates/config.yml entry: ./scripts/ci/pre_commit/json_schema.py @@ -1041,7 +1050,7 @@ repos: pass_filenames: true files: ^airflow/config_templates/config\.yml$ require_serial: true - additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==5.3.1', 'requests==2.25.0'] + additional_dependencies: ['jsonschema>=3.2.0,<5.0', 'PyYAML==6.0.2', 'requests==2.32.3'] - id: check-persist-credentials-disabled-in-github-workflows name: Check persistent creds in workflow files description: Check that workflow files have persist-credentials disabled @@ -1182,6 +1191,8 @@ repos: ^airflow/utils/helpers.py$ | ^providers/src/airflow/providers/ | ^(providers/)?tests/ | + task_sdk/src/airflow/sdk/definitions/dag.py$ | + 
task_sdk/src/airflow/sdk/definitions/node.py$ | ^dev/.*\.py$ | ^scripts/.*\.py$ | ^docker_tests/.*$ | @@ -1291,6 +1302,7 @@ repos: ^.*/.*_vendor/ | ^airflow/migrations | ^providers/ | + ^task_sdk/ | ^dev | ^scripts | ^docs | @@ -1343,6 +1355,23 @@ repos: files: ^.*\.py$ require_serial: true additional_dependencies: ['rich>=12.4.4'] + - id: mypy-task-sdk + name: Run mypy for Task SDK + language: python + entry: ./scripts/ci/pre_commit/mypy.py --namespace-packages + files: ^task_sdk/src/airflow/sdk/.*\.py$|^task_sdk/tests//.*\.py$ + exclude: ^.*/.*_vendor/ + require_serial: true + additional_dependencies: ['rich>=12.4.4'] + - id: mypy-task-sdk + stages: ['manual'] + name: Run mypy for Task SDK (manual) + language: python + entry: ./scripts/ci/pre_commit/mypy_folder.py task_sdk/src/airflow/sdk + pass_filenames: false + files: ^.*\.py$ + require_serial: true + additional_dependencies: ['rich>=12.4.4'] - id: check-provider-yaml-valid name: Validate provider.yaml files entry: ./scripts/ci/pre_commit/check_provider_yaml_files.py diff --git a/Dockerfile b/Dockerfile index 796e63be39cca..25ceb86307766 100644 --- a/Dockerfile +++ b/Dockerfile @@ -36,7 +36,7 @@ # much smaller. # # Use the same builder frontend version for everyone -ARG AIRFLOW_EXTRAS="aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,fab,ftp,google,google-auth,graphviz,grpc,hashicorp,http,ldap,microsoft-azure,mysql,odbc,openlineage,pandas,postgres,redis,sendgrid,sftp,slack,snowflake,ssh,statsd,uv,virtualenv" +ARG AIRFLOW_EXTRAS="aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,fab,ftp,google,google-auth,graphviz,grpc,hashicorp,http,ldap,microsoft-azure,mysql,odbc,openlineage,pandas,postgres,redis,sendgrid,sftp,slack,snowflake,ssh,statsd,uv" ARG ADDITIONAL_AIRFLOW_EXTRAS="" ARG ADDITIONAL_PYTHON_DEPS="" @@ -49,8 +49,13 @@ ARG AIRFLOW_VERSION="2.10.2" ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm" -ARG AIRFLOW_PIP_VERSION=24.2 -ARG AIRFLOW_UV_VERSION=0.4.24 + +# You can swap comments between those two args to test pip from the main version +# When you attempt to test if the version of `pip` from specified branch works for our builds +# Also use `force pip` label on your PR to swap all places we use `uv` to `pip` +ARG AIRFLOW_PIP_VERSION=24.3.1 +# ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" +ARG AIRFLOW_UV_VERSION=0.4.29 ARG AIRFLOW_USE_UV="false" ARG UV_HTTP_TIMEOUT="300" ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" @@ -615,7 +620,7 @@ function common::install_packaging_tools() { echo "${COLOR_BLUE}Installing latest pip version${COLOR_RESET}" echo pip install --root-user-action ignore --disable-pip-version-check --upgrade pip - elif [[ ! ${AIRFLOW_PIP_VERSION} =~ [0-9.]* ]]; then + elif [[ ! ${AIRFLOW_PIP_VERSION} =~ ^[0-9].* ]]; then echo echo "${COLOR_BLUE}Installing pip version from spec ${AIRFLOW_PIP_VERSION}${COLOR_RESET}" echo @@ -628,7 +633,6 @@ function common::install_packaging_tools() { echo echo "${COLOR_BLUE}(Re)Installing pip version: ${AIRFLOW_PIP_VERSION}${COLOR_RESET}" echo - # shellcheck disable=SC2086 pip install --root-user-action ignore --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" fi fi @@ -637,7 +641,7 @@ function common::install_packaging_tools() { echo "${COLOR_BLUE}Installing latest uv version${COLOR_RESET}" echo pip install --root-user-action ignore --disable-pip-version-check --upgrade uv - elif [[ ! ${AIRFLOW_UV_VERSION} =~ [0-9.]* ]]; then + elif [[ ! 
${AIRFLOW_UV_VERSION} =~ ^[0-9].* ]]; then echo echo "${COLOR_BLUE}Installing uv version from spec ${AIRFLOW_UV_VERSION}${COLOR_RESET}" echo @@ -714,6 +718,7 @@ COPY <<"EOF" /install_from_docker_context_files.sh function install_airflow_and_providers_from_docker_context_files(){ + local flags=() if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} fi @@ -752,10 +757,10 @@ function install_airflow_and_providers_from_docker_context_files(){ install_airflow_package=("apache-airflow[${AIRFLOW_EXTRAS}]==${AIRFLOW_VERSION}") fi - # Find Provider packages in docker-context files - readarray -t installing_providers_packages< <(python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?providers*.{whl,tar.gz} 2>/dev/null || true) + # Find Provider/TaskSDK packages in docker-context files + readarray -t airflow_packages< <(python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?{providers,task?sdk}*.{whl,tar.gz} 2>/dev/null || true) echo - echo "${COLOR_BLUE}Found provider packages in docker-context-files folder: ${installing_providers_packages[*]}${COLOR_RESET}" + echo "${COLOR_BLUE}Found provider packages in docker-context-files folder: ${airflow_packages[*]}${COLOR_RESET}" echo if [[ ${USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES=} == "true" ]]; then @@ -768,11 +773,7 @@ function install_airflow_and_providers_from_docker_context_files(){ echo "${COLOR_BLUE}Installing docker-context-files packages with constraints found in ${local_constraints_file}${COLOR_RESET}" echo # force reinstall all airflow + provider packages with constraints found in - set -x - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade \ - ${ADDITIONAL_PIP_INSTALL_FLAGS} --constraint "${local_constraints_file}" \ - "${install_airflow_package[@]}" "${installing_providers_packages[@]}" - set +x + flags=(--upgrade --constraint "${local_constraints_file}") echo echo "${COLOR_BLUE}Copying ${local_constraints_file} to ${HOME}/constraints.txt${COLOR_RESET}" echo @@ -781,23 +782,21 @@ function install_airflow_and_providers_from_docker_context_files(){ echo echo "${COLOR_BLUE}Installing docker-context-files packages with constraints from GitHub${COLOR_RESET}" echo - set -x - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \ - ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - --constraint "${HOME}/constraints.txt" \ - "${install_airflow_package[@]}" "${installing_providers_packages[@]}" - set +x + flags=(--constraint "${HOME}/constraints.txt") fi else echo echo "${COLOR_BLUE}Installing docker-context-files packages without constraints${COLOR_RESET}" echo - set -x - ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \ - ${ADDITIONAL_PIP_INSTALL_FLAGS} \ - "${install_airflow_package[@]}" "${installing_providers_packages[@]}" - set +x + flags=() fi + + set -x + ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \ + ${ADDITIONAL_PIP_INSTALL_FLAGS} \ + "${flags[@]}" \ + "${install_airflow_package[@]}" "${airflow_packages[@]}" + set +x common::install_packaging_tools pip check } diff --git a/Dockerfile.ci b/Dockerfile.ci index 826d7109ed551..6ddf2f4e1ac4d 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -561,7 +561,7 @@ function common::install_packaging_tools() { echo "${COLOR_BLUE}Installing latest pip version${COLOR_RESET}" echo pip install --root-user-action ignore --disable-pip-version-check --upgrade pip - elif [[ ! ${AIRFLOW_PIP_VERSION} =~ [0-9.]* ]]; then + elif [[ ! 
${AIRFLOW_PIP_VERSION} =~ ^[0-9].* ]]; then echo echo "${COLOR_BLUE}Installing pip version from spec ${AIRFLOW_PIP_VERSION}${COLOR_RESET}" echo @@ -574,7 +574,6 @@ function common::install_packaging_tools() { echo echo "${COLOR_BLUE}(Re)Installing pip version: ${AIRFLOW_PIP_VERSION}${COLOR_RESET}" echo - # shellcheck disable=SC2086 pip install --root-user-action ignore --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" fi fi @@ -583,7 +582,7 @@ function common::install_packaging_tools() { echo "${COLOR_BLUE}Installing latest uv version${COLOR_RESET}" echo pip install --root-user-action ignore --disable-pip-version-check --upgrade uv - elif [[ ! ${AIRFLOW_UV_VERSION} =~ [0-9.]* ]]; then + elif [[ ! ${AIRFLOW_UV_VERSION} =~ ^[0-9].* ]]; then echo echo "${COLOR_BLUE}Installing uv version from spec ${AIRFLOW_UV_VERSION}${COLOR_RESET}" echo @@ -1018,6 +1017,13 @@ function determine_airflow_to_use() { --constraint https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt # Some packages might leave legacy typing module which causes test issues pip uninstall -y typing || true + if [[ ${LINK_PROVIDERS_TO_AIRFLOW_PACKAGE=} == "true" ]]; then + echo + echo "${COLOR_BLUE}Linking providers to airflow package as we are using them from mounted sources.${COLOR_RESET}" + echo + rm -rf /usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers + ln -s "${AIRFLOW_SOURCES}/providers/src/airflow/providers" "/usr/local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow/providers" + fi fi if [[ "${USE_AIRFLOW_VERSION}" =~ ^2\.2\..*|^2\.1\..*|^2\.0\..* && "${AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=}" != "" ]]; then @@ -1155,7 +1161,7 @@ function check_force_lowest_dependencies() { echo fi set -x - uv pip install --python "$(which python)" --resolution lowest-direct --upgrade --editable ".${EXTRA}" + uv pip install --python "$(which python)" --resolution lowest-direct --upgrade --editable ".${EXTRA}" --editable "./task_sdk" set +x } @@ -1195,7 +1201,7 @@ ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" # NOTE! When you want to make sure dependencies are installed from scratch in your PR after removing # some dependencies, you also need to set "disable image cache" in your PR to make sure the image is # not built using the "main" version of those dependencies. 
-ARG DEPENDENCIES_EPOCH_NUMBER="11" +ARG DEPENDENCIES_EPOCH_NUMBER="13" # Make sure noninteractive debian install is used and language variables set ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ @@ -1362,8 +1368,12 @@ RUN bash /scripts/docker/install_packaging_tools.sh; \ # Here we fix the versions so all subsequent commands will use the versions # from the sources -ARG AIRFLOW_PIP_VERSION=24.2 -ARG AIRFLOW_UV_VERSION=0.4.24 +# You can swap comments between those two args to test pip from the main version +# When you attempt to test if the version of `pip` from specified branch works for our builds +# Also use `force pip` label on your PR to swap all places we use `uv` to `pip` +ARG AIRFLOW_PIP_VERSION=24.3.1 +# ARG AIRFLOW_PIP_VERSION="git+https://github.com/pypa/pip.git@main" +ARG AIRFLOW_UV_VERSION=0.4.29 ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \ AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} @@ -1385,6 +1395,7 @@ RUN bash /scripts/docker/install_packaging_tools.sh; \ COPY pyproject.toml ${AIRFLOW_SOURCES}/pyproject.toml COPY providers/pyproject.toml ${AIRFLOW_SOURCES}/providers/pyproject.toml COPY task_sdk/pyproject.toml ${AIRFLOW_SOURCES}/task_sdk/pyproject.toml +COPY task_sdk/README.md ${AIRFLOW_SOURCES}/task_sdk/README.md COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/ COPY tests_common/ ${AIRFLOW_SOURCES}/tests_common/ COPY generated/* ${AIRFLOW_SOURCES}/generated/ diff --git a/INSTALL b/INSTALL index 6583d9de44206..edf738f5e114d 100644 --- a/INSTALL +++ b/INSTALL @@ -93,6 +93,10 @@ or you can install all packages needed to run tests for core, providers, and all You can see the list of all available extras below. +Additionally when you want to develop providers you need to install providers code in editable mode: + + pip install -e "./providers" + # Using Hatch to manage your Python, virtualenvs, and build packages Airflow uses [hatch](https://hatch.pypa.io/) as a build and development tool. It is one of the popular @@ -255,8 +259,7 @@ Those extras are available as regular core airflow extras - they install optiona # START CORE EXTRAS HERE aiobotocore, apache-atlas, apache-webhdfs, async, cgroups, cloudpickle, github-enterprise, google- -auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, rabbitmq, s3fs, sentry, statsd, uv, -virtualenv +auth, graphviz, kerberos, ldap, leveldb, otel, pandas, password, rabbitmq, s3fs, sentry, statsd, uv # END CORE EXTRAS HERE diff --git a/INTHEWILD.md b/INTHEWILD.md index 310d018b98329..bdcb4973d78cb 100644 --- a/INTHEWILD.md +++ b/INTHEWILD.md @@ -158,6 +158,7 @@ Currently, **officially** using Airflow: 1. [Cryptalizer.com](https://www.cryptalizer.com/) 1. [Currency](https://www.gocurrency.com/) [[@FCLI](https://github.com/FCLI) & [@alexbegg](https://github.com/alexbegg)] 1. [Custom Ink](https://www.customink.com/) [[@david-dalisay](https://github.com/david-dalisay), [@dmartin11](https://github.com/dmartin11) & [@mpeteuil](https://github.com/mpeteuil)] +1. [Cyberdino](https://www.cyberdino.io) [[@cyberdino-io](https://github.com/cyberdino-io)] 1. [Cyscale](https://cyscale.com) [[@ocical](https://github.com/ocical)] 1. [Dailymotion](http://www.dailymotion.com/fr) [[@germaintanguy](https://github.com/germaintanguy) & [@hc](https://github.com/hc)] 1. [DANA](https://www.dana.id/) [[@imamdigmi](https://github.com/imamdigmi)] @@ -383,7 +384,7 @@ Currently, **officially** using Airflow: 1. [Paxful](https://paxful.com) [[@ne1r0n](https://github.com/ne1r0n)] 1. [PayFit](https://payfit.com) [[@pcorbel](https://github.com/pcorbel)] 1. 
[PAYMILL](https://www.paymill.com/) [[@paymill](https://github.com/paymill) & [@matthiashuschle](https://github.com/matthiashuschle)] -1. [PayPal](https://www.paypal.com/) [[@r39132](https://github.com/r39132) & [@jhsenjaliya](https://github.com/jhsenjaliya)] +1. [PayPal](https://www.paypal.com/) [[@kaddynator](https://github.com/kaddynator), [@r39132](https://github.com/r39132) & [@jhsenjaliya](https://github.com/jhsenjaliya)] 1. [Pecan](https://www.pecan.ai) [[@ohadmata](https://github.com/ohadmata)] 1. [Pernod-Ricard](https://www.pernod-ricard.com/) [[@romain-nio](https://github.com/romain-nio)] 1. [PEXA](https://www.pexa.com.au/) [[@andriyfedorov](https://github.com/andriyfedorov)] diff --git a/ISSUE_TRIAGE_PROCESS.rst b/ISSUE_TRIAGE_PROCESS.rst index a80c431afb52a..44ba3d33fee40 100644 --- a/ISSUE_TRIAGE_PROCESS.rst +++ b/ISSUE_TRIAGE_PROCESS.rst @@ -170,18 +170,17 @@ and ``area:providers``. This is especially important since these are now being released and versioned independently. There are more detailed areas of the Core Airflow project such as Scheduler, Webserver, -API, UI, Logging, and Kubernetes, which are all conceptually under the +API, UI, Logging, and Metrics, which are all conceptually under the "Airflow Core" area of the project. Similarly within Airflow Providers, the larger providers such as Apache, AWS, Azure, and Google who have many hooks and operators within them, have labels directly -associated with them such as ``provider:Apache``, ``provider:AWS``, -``provider:Azure``, and ``provider:Google``. +associated with them such as ``provider:amazon-aws``, ``provider:microsoft-azure``, and ``provider:google``. These make it easier for developers working on a single provider to track issues for that provider. -Some provider labels may couple several providers for example: ``provider:Protocols`` +Note: each provider has its own unique label. It is possible for an issue to be tagged with more than one provider label. Most issues need a combination of "kind" and "area" labels to be actionable. For example: diff --git a/README.md b/README.md index 0419ae0456070..01408d1dbb7ab 100644 --- a/README.md +++ b/README.md @@ -219,7 +219,7 @@ Those are - in the order of most common ways people install Airflow: - [PyPI releases](https://pypi.org/project/apache-airflow/) to install Airflow using standard `pip` tool - [Docker Images](https://hub.docker.com/r/apache/airflow) to install airflow via `docker` tool, use them in Kubernetes, Helm Charts, `docker-compose`, `docker swarm`, etc. You can - read more about using, customising, and extending the images in the + read more about using, customizing, and extending the images in the [Latest docs](https://airflow.apache.org/docs/docker-stack/index.html), and learn details on the internals in the [images](https://airflow.apache.org/docs/docker-stack/index.html) document. - [Tags in GitHub](https://github.com/apache/airflow/tags) to retrieve the git project sources that diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index c2408bd63f504..f44c8a0d139d2 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -3251,8 +3251,7 @@ And to mark a task as producing a dataset pass the dataset(s) to the ``outlets`` .. code-block:: python @task(outlets=[dataset]) - def my_task(): - ... + def my_task(): ... # Or for classic operators @@ -3286,8 +3285,7 @@ Previously you had to assign a DAG to a module-level variable in order for Airfl @dag - def dag_maker(): - ... + def dag_maker(): ...
dag2 = dag_maker() @@ -3302,8 +3300,7 @@ can become @dag - def dag_maker(): - ... + def dag_maker(): ... dag_maker() @@ -3634,13 +3631,11 @@ For example, in your ``custom_config.py``: # before - class YourCustomFormatter(logging.Formatter): - ... + class YourCustomFormatter(logging.Formatter): ... # after - class YourCustomFormatter(TimezoneAware): - ... + class YourCustomFormatter(TimezoneAware): ... AIRFLOW_FORMATTER = LOGGING_CONFIG["formatters"]["airflow"] @@ -6330,27 +6325,22 @@ The old syntax of passing ``context`` as a dictionary will continue to work with .. code-block:: python - def execution_date_fn(execution_date, ctx): - ... + def execution_date_fn(execution_date, ctx): ... ``execution_date_fn`` can take in any number of keyword arguments available in the task context dictionary. The following forms of ``execution_date_fn`` are all supported: .. code-block:: python - def execution_date_fn(dt): - ... + def execution_date_fn(dt): ... - def execution_date_fn(execution_date): - ... + def execution_date_fn(execution_date): ... - def execution_date_fn(execution_date, ds_nodash): - ... + def execution_date_fn(execution_date, ds_nodash): ... - def execution_date_fn(execution_date, ds_nodash, dag): - ... + def execution_date_fn(execution_date, ds_nodash, dag): ... The default value for ``[webserver] cookie_samesite`` has been changed to ``Lax`` """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -7077,8 +7067,7 @@ Previous signature: external_trigger=False, conf=None, session=None, - ): - ... + ): ... current: @@ -7094,8 +7083,7 @@ current: conf=None, run_type=None, session=None, - ): - ... + ): ... If user provides ``run_id`` then the ``run_type`` will be derived from it by checking prefix, allowed types : ``manual``\ , ``scheduled``\ , ``backfill`` (defined by ``airflow.utils.types.DagRunType``\ ). @@ -7193,8 +7181,9 @@ can be replaced by the following code: logger = logging.getLogger("custom-logger") - with redirect_stdout(StreamLogWriter(logger, logging.INFO)), redirect_stderr( - StreamLogWriter(logger, logging.WARN) + with ( + redirect_stdout(StreamLogWriter(logger, logging.INFO)), + redirect_stderr(StreamLogWriter(logger, logging.WARN)), ): print("I Love Airflow") @@ -7223,8 +7212,7 @@ are deprecated and will be removed in future versions. include_examples=conf.getboolean("core", "LOAD_EXAMPLES"), safe_mode=conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE"), store_serialized_dags=False, - ): - ... + ): ... **current**\ : @@ -7235,8 +7223,7 @@ are deprecated and will be removed in future versions. include_examples=conf.getboolean("core", "LOAD_EXAMPLES"), safe_mode=conf.getboolean("core", "DAG_DISCOVERY_SAFE_MODE"), read_dags_from_db=False, - ): - ... + ): ... If you were using positional arguments, it requires no change but if you were using keyword arguments, please change ``store_serialized_dags`` to ``read_dags_from_db``. @@ -8058,8 +8045,7 @@ Before: dataset_id: str, dataset_resource: dict, # ... - ): - ... + ): ... After: @@ -8069,8 +8055,7 @@ After: dataset_resource: dict, dataset_id: Optional[str] = None, # ... - ): - ... + ): ... Changes in ``amazon`` provider package """""""""""""""""""""""""""""""""""""""""" @@ -10150,16 +10135,14 @@ Old signature: .. code-block:: python - def get_task_instances(self, session, start_date=None, end_date=None): - ... + def get_task_instances(self, session, start_date=None, end_date=None): ... New signature: .. 
code-block:: python @provide_session - def get_task_instances(self, start_date=None, end_date=None, session=None): - ... + def get_task_instances(self, start_date=None, end_date=None, session=None): ... For ``DAG`` ~~~~~~~~~~~~~~~ @@ -10168,16 +10151,14 @@ Old signature: .. code-block:: python - def get_task_instances(self, session, start_date=None, end_date=None, state=None): - ... + def get_task_instances(self, session, start_date=None, end_date=None, state=None): ... New signature: .. code-block:: python @provide_session - def get_task_instances(self, start_date=None, end_date=None, state=None, session=None): - ... + def get_task_instances(self, start_date=None, end_date=None, state=None, session=None): ... In either case, it is necessary to rewrite calls to the ``get_task_instances`` method that currently provide the ``session`` positional argument. New calls to this method look like: @@ -10658,15 +10639,13 @@ Old signature: .. code-block:: python - def create_transfer_job(self, description, schedule, transfer_spec, project_id=None): - ... + def create_transfer_job(self, description, schedule, transfer_spec, project_id=None): ... New signature: .. code-block:: python - def create_transfer_job(self, body): - ... + def create_transfer_job(self, body): ... It is necessary to rewrite calls to method. The new call looks like this: @@ -10691,15 +10670,13 @@ Old signature: .. code-block:: python - def wait_for_transfer_job(self, job): - ... + def wait_for_transfer_job(self, job): ... New signature: .. code-block:: python - def wait_for_transfer_job(self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)): - ... + def wait_for_transfer_job(self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)): ... The behavior of ``wait_for_transfer_job`` has changed: diff --git a/airflow/__init__.py b/airflow/__init__.py index f6c40b5091bbc..411aac70fc6f7 100644 --- a/airflow/__init__.py +++ b/airflow/__init__.py @@ -15,7 +15,10 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from __future__ import annotations + +# We do not use "from __future__ import annotations" here because it is not supported +# by Pycharm when we want to make sure all imports in airflow work from namespace packages +# Adding it automatically is excluded in pyproject.toml via I002 ruff rule exclusion # Make `airflow` a namespace package, supporting installing # airflow.providers.* in different locations (i.e. one in site, and one in user @@ -62,6 +65,8 @@ "DAG", "Asset", "XComArg", + # TODO: Remove this module in Airflow 3.2 + "Dataset", ] # Perform side-effects unless someone has explicitly opted out before import @@ -80,12 +85,13 @@ "version": (".version", "", False), # Deprecated lazy imports "AirflowException": (".exceptions", "AirflowException", True), + "Dataset": (".assets", "Dataset", True), } if TYPE_CHECKING: # These objects are imported by PEP-562, however, static analyzers and IDE's # have no idea about typing of these objects. # Add it under TYPE_CHECKING block should help with it. 
- from airflow.models.asset import Asset + from airflow.assets import Asset, Dataset from airflow.models.dag import DAG from airflow.models.xcom_arg import XComArg @@ -94,15 +100,6 @@ def __getattr__(name: str): # PEP-562: Lazy loaded attributes on python modules module_path, attr_name, deprecated = __lazy_imports.get(name, ("", "", False)) if not module_path: - if name.startswith("PY3") and (py_minor := name[3:]) in ("6", "7", "8", "9", "10", "11", "12"): - warnings.warn( - f"Python version constraint {name!r} is deprecated and will be removed in the future. " - f"Please get version info from the 'sys.version_info'.", - DeprecationWarning, - stacklevel=2, - ) - return sys.version_info >= (3, int(py_minor)) - raise AttributeError(f"module {__name__!r} has no attribute {name!r}") elif deprecated: warnings.warn( diff --git a/airflow/api/__init__.py b/airflow/api/__init__.py index d0613bb651faa..10c1ce6cea3c3 100644 --- a/airflow/api/__init__.py +++ b/airflow/api/__init__.py @@ -23,18 +23,14 @@ from importlib import import_module from airflow.configuration import conf -from airflow.exceptions import AirflowConfigException, AirflowException +from airflow.exceptions import AirflowException log = logging.getLogger(__name__) def load_auth(): """Load authentication backends.""" - auth_backends = "airflow.api.auth.backend.default" - try: - auth_backends = conf.get("api", "auth_backends") - except AirflowConfigException: - pass + auth_backends = conf.get("api", "auth_backends") backends = [] try: diff --git a/airflow/api/common/mark_tasks.py b/airflow/api/common/mark_tasks.py index a828d140c9769..957e82e7de491 100644 --- a/airflow/api/common/mark_tasks.py +++ b/airflow/api/common/mark_tasks.py @@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Collection, Iterable, Iterator, NamedTuple -from sqlalchemy import or_, select +from sqlalchemy import and_, or_, select from sqlalchemy.orm import lazyload from airflow.models.dagrun import DagRun @@ -402,8 +402,13 @@ def set_dag_run_state_to_failed( select(TaskInstance).filter( TaskInstance.dag_id == dag.dag_id, TaskInstance.run_id == run_id, - TaskInstance.state.not_in(State.finished), - TaskInstance.state.not_in(running_states), + or_( + TaskInstance.state.is_(None), + and_( + TaskInstance.state.not_in(State.finished), + TaskInstance.state.not_in(running_states), + ), + ), ) ).all() diff --git a/airflow/api_connexion/endpoints/asset_endpoint.py b/airflow/api_connexion/endpoints/asset_endpoint.py index cbbe542ea7987..1ea1db2b3bbb8 100644 --- a/airflow/api_connexion/endpoints/asset_endpoint.py +++ b/airflow/api_connexion/endpoints/asset_endpoint.py @@ -133,7 +133,7 @@ def get_asset_events( query = select(AssetEvent) if asset_id: - query = query.where(AssetEvent.dataset_id == asset_id) + query = query.where(AssetEvent.asset_id == asset_id) if source_dag_id: query = query.where(AssetEvent.source_dag_id == source_dag_id) if source_task_id: @@ -166,7 +166,7 @@ def _generate_queued_event_where_clause( where_clause.append(AssetDagRunQueue.target_dag_id == dag_id) if uri is not None: where_clause.append( - AssetDagRunQueue.dataset_id.in_( + AssetDagRunQueue.asset_id.in_( select(AssetModel.id).where(AssetModel.uri == uri), ), ) @@ -187,7 +187,7 @@ def get_dag_asset_queued_event( where_clause = _generate_queued_event_where_clause(dag_id=dag_id, uri=uri, before=before) adrq = session.scalar( select(AssetDagRunQueue) - .join(AssetModel, AssetDagRunQueue.dataset_id == AssetModel.id) + .join(AssetModel, AssetDagRunQueue.asset_id == AssetModel.id) .where(*where_clause) ) 
if adrq is None: @@ -228,7 +228,7 @@ def get_dag_asset_queued_events( where_clause = _generate_queued_event_where_clause(dag_id=dag_id, before=before) query = ( select(AssetDagRunQueue, AssetModel.uri) - .join(AssetModel, AssetDagRunQueue.dataset_id == AssetModel.id) + .join(AssetModel, AssetDagRunQueue.asset_id == AssetModel.id) .where(*where_clause) ) result = session.execute(query).all() @@ -278,7 +278,7 @@ def get_asset_queued_events( ) query = ( select(AssetDagRunQueue, AssetModel.uri) - .join(AssetModel, AssetDagRunQueue.dataset_id == AssetModel.id) + .join(AssetModel, AssetDagRunQueue.asset_id == AssetModel.id) .where(*where_clause) ) total_entries = get_query_count(query, session=session) diff --git a/airflow/api_connexion/endpoints/backfill_endpoint.py b/airflow/api_connexion/endpoints/backfill_endpoint.py deleted file mode 100644 index 94d6ad21f05f1..0000000000000 --- a/airflow/api_connexion/endpoints/backfill_endpoint.py +++ /dev/null @@ -1,180 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -import logging -from functools import wraps -from typing import TYPE_CHECKING - -from flask import request -from marshmallow import ValidationError -from sqlalchemy import select - -from airflow.api_connexion import security -from airflow.api_connexion.exceptions import BadRequest, Conflict, NotFound -from airflow.api_connexion.schemas.backfill_schema import ( - BackfillCollection, - backfill_collection_schema, - backfill_schema, -) -from airflow.models.backfill import ( - AlreadyRunningBackfill, - Backfill, - _cancel_backfill, - _create_backfill, -) -from airflow.utils.session import NEW_SESSION, provide_session -from airflow.www.decorators import action_logging - -if TYPE_CHECKING: - from datetime import datetime - - from sqlalchemy.orm import Session - - from airflow.api_connexion.types import APIResponse - -log = logging.getLogger(__name__) - -RESOURCE_EVENT_PREFIX = "dag" - - -def backfill_to_dag(func): - """ - Enrich the request with dag_id. 
- - :meta private: - """ - - @wraps(func) - def wrapper(*, backfill_id, session, **kwargs): - backfill = session.get(Backfill, backfill_id) - if not backfill: - raise NotFound("Backfill not found") - return func(dag_id=backfill.dag_id, backfill_id=backfill_id, session=session, **kwargs) - - return wrapper - - -@security.requires_access_dag("GET") -@action_logging -@provide_session -def list_backfills(dag_id, session): - backfills = session.scalars(select(Backfill).where(Backfill.dag_id == dag_id)).all() - obj = BackfillCollection( - backfills=backfills, - total_entries=len(backfills), - ) - return backfill_collection_schema.dump(obj) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def pause_backfill(*, backfill_id, session, **kwargs): - br = session.get(Backfill, backfill_id) - if br.completed_at: - raise Conflict("Backfill is already completed.") - if br.is_paused is False: - br.is_paused = True - session.commit() - return backfill_schema.dump(br) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def unpause_backfill(*, backfill_id, session, **kwargs): - br = session.get(Backfill, backfill_id) - if br.completed_at: - raise Conflict("Backfill is already completed.") - if br.is_paused: - br.is_paused = False - session.commit() - return backfill_schema.dump(br) - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("GET") -@action_logging -def get_backfill(*, backfill_id: int, session: Session = NEW_SESSION, **kwargs): - backfill = session.get(Backfill, backfill_id) - if backfill: - return backfill_schema.dump(backfill) - raise NotFound("Backfill not found") - - -def backfill_obj_to_kwargs(f): - """ - Convert the request body (containing backfill object json) to kwargs. - - The main point here is to be compatible with the ``requires_access_dag`` decorator, - which takes dag_id kwarg and doesn't support json request body. 
- """ - - @wraps(f) - def inner(): - body = request.json - try: - obj = backfill_schema.load(body) - except ValidationError as err: - raise BadRequest(detail=str(err.messages)) - return f(**obj) - - return inner - - -@backfill_obj_to_kwargs -@security.requires_access_dag("PUT") -@action_logging -def create_backfill( - dag_id: str, - from_date: datetime, - to_date: datetime, - max_active_runs: int = 10, - reverse: bool = False, - dag_run_conf: dict | None = None, -) -> APIResponse: - try: - backfill_obj = _create_backfill( - dag_id=dag_id, - from_date=from_date, - to_date=to_date, - max_active_runs=max_active_runs, - reverse=reverse, - dag_run_conf=dag_run_conf, - ) - return backfill_schema.dump(backfill_obj) - except AlreadyRunningBackfill: - raise Conflict(f"There is already a running backfill for dag {dag_id}") - - -@provide_session -@backfill_to_dag -@security.requires_access_dag("PUT") -@action_logging -def cancel_backfill( - *, - backfill_id, - session: Session = NEW_SESSION, # used by backfill_to_dag decorator - **kwargs, -): - br = _cancel_backfill(backfill_id=backfill_id) - return backfill_schema.dump(br) diff --git a/airflow/api_connexion/endpoints/dag_endpoint.py b/airflow/api_connexion/endpoints/dag_endpoint.py index 0352297bfffd4..352bf9cfd4c11 100644 --- a/airflow/api_connexion/endpoints/dag_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_endpoint.py @@ -134,7 +134,7 @@ def get_dags( try: dags_collection_schema = ( - DAGCollectionSchema(only=[f"dags.{field}" for field in fields]) + DAGCollectionSchema(only=[f"dags.{field}" for field in fields] + ["total_entries"]) if fields else DAGCollectionSchema() ) diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 74eae13ddd4d0..8ebb2b44e2bb3 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -130,7 +130,7 @@ def get_upstream_asset_events(*, dag_id: str, dag_run_id: str, session: Session "DAGRun not found", detail=f"DAGRun with DAG ID: '{dag_id}' and DagRun ID: '{dag_run_id}' not found", ) - events = dag_run.consumed_dataset_events + events = dag_run.consumed_asset_events return asset_event_collection_schema.dump( AssetEventCollection(asset_events=events, total_entries=len(events)) ) @@ -373,6 +373,7 @@ def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse: raise AlreadyExists(detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{run_id}' already exists") +@mark_fastapi_migration_done @security.requires_access_dag("PUT", DagAccessEntity.RUN) @provide_session @action_logging diff --git a/airflow/api_connexion/endpoints/dag_source_endpoint.py b/airflow/api_connexion/endpoints/dag_source_endpoint.py index d7713106d1cdc..9a3285884e7e6 100644 --- a/airflow/api_connexion/endpoints/dag_source_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py @@ -28,6 +28,7 @@ from airflow.auth.managers.models.resource_details import DagAccessEntity, DagDetails from airflow.models.dag import DagModel from airflow.models.dagcode import DagCode +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.session import NEW_SESSION, provide_session from airflow.www.extensions.init_auth_manager import get_auth_manager @@ -37,6 +38,7 @@ from airflow.auth.managers.models.batch_apis import IsAuthorizedDagRequest +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.CODE) @provide_session def get_dag_source(*, 
file_token: str, session: Session = NEW_SESSION) -> Response: diff --git a/airflow/api_connexion/endpoints/dag_warning_endpoint.py b/airflow/api_connexion/endpoints/dag_warning_endpoint.py index 8a15a30cece8f..a158c3f443c87 100644 --- a/airflow/api_connexion/endpoints/dag_warning_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_warning_endpoint.py @@ -29,6 +29,7 @@ from airflow.api_connexion.security import get_readable_dags from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.models.dagwarning import DagWarning as DagWarningModel +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.db import get_query_count from airflow.utils.session import NEW_SESSION, provide_session @@ -38,6 +39,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.WARNING) @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_connexion/endpoints/event_log_endpoint.py b/airflow/api_connexion/endpoints/event_log_endpoint.py index ef55ad145c755..8084c2ecab674 100644 --- a/airflow/api_connexion/endpoints/event_log_endpoint.py +++ b/airflow/api_connexion/endpoints/event_log_endpoint.py @@ -31,6 +31,7 @@ from airflow.auth.managers.models.resource_details import DagAccessEntity from airflow.models import Log from airflow.utils import timezone +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.db import get_query_count from airflow.utils.session import NEW_SESSION, provide_session @@ -40,6 +41,7 @@ from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.AUDIT_LOG) @provide_session def get_event_log(*, event_log_id: int, session: Session = NEW_SESSION) -> APIResponse: diff --git a/airflow/api_connexion/endpoints/plugin_endpoint.py b/airflow/api_connexion/endpoints/plugin_endpoint.py index 5a100fd6d5878..97f9d8c6c4e09 100644 --- a/airflow/api_connexion/endpoints/plugin_endpoint.py +++ b/airflow/api_connexion/endpoints/plugin_endpoint.py @@ -23,11 +23,13 @@ from airflow.api_connexion.schemas.plugin_schema import PluginCollection, plugin_collection_schema from airflow.auth.managers.models.resource_details import AccessView from airflow.plugins_manager import get_plugin_info +from airflow.utils.api_migration import mark_fastapi_migration_done if TYPE_CHECKING: from airflow.api_connexion.types import APIResponse +@mark_fastapi_migration_done @security.requires_access_view(AccessView.PLUGINS) @format_parameters({"limit": check_limit}) def get_plugins(*, limit: int, offset: int = 0) -> APIResponse: diff --git a/airflow/api_connexion/endpoints/pool_endpoint.py b/airflow/api_connexion/endpoints/pool_endpoint.py index 553d50c7464b7..a6ccd3a4aa9b9 100644 --- a/airflow/api_connexion/endpoints/pool_endpoint.py +++ b/airflow/api_connexion/endpoints/pool_endpoint.py @@ -30,6 +30,7 @@ from airflow.api_connexion.parameters import apply_sorting, check_limit, format_parameters from airflow.api_connexion.schemas.pool_schema import PoolCollection, pool_collection_schema, pool_schema from airflow.models.pool import Pool +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.session import NEW_SESSION, provide_session from airflow.www.decorators import action_logging @@ -39,6 +40,7 @@ from airflow.api_connexion.types import APIResponse, UpdateMask +@mark_fastapi_migration_done 
@security.requires_access_pool("DELETE") @action_logging @provide_session @@ -53,6 +55,7 @@ def delete_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIRespons return Response(status=HTTPStatus.NO_CONTENT) +@mark_fastapi_migration_done @security.requires_access_pool("GET") @provide_session def get_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIResponse: @@ -63,6 +66,7 @@ def get_pool(*, pool_name: str, session: Session = NEW_SESSION) -> APIResponse: return pool_schema.dump(obj) +@mark_fastapi_migration_done @security.requires_access_pool("GET") @format_parameters({"limit": check_limit}) @provide_session @@ -83,6 +87,7 @@ def get_pools( return pool_collection_schema.dump(PoolCollection(pools=pools, total_entries=total_entries)) +@mark_fastapi_migration_done @security.requires_access_pool("PUT") @action_logging @provide_session @@ -118,9 +123,11 @@ def patch_pool( # there is no way field is None here (UpdateMask is a List[str]) # so if pool_schema.declared_fields[field].attribute is None file is returned update_mask = [ - pool_schema.declared_fields[field].attribute # type: ignore[misc] - if pool_schema.declared_fields[field].attribute - else field + ( + pool_schema.declared_fields[field].attribute # type: ignore[misc] + if pool_schema.declared_fields[field].attribute + else field + ) for field in update_mask ] except KeyError as err: @@ -140,6 +147,7 @@ def patch_pool( return pool_schema.dump(pool) +@mark_fastapi_migration_done @security.requires_access_pool("POST") @action_logging @provide_session diff --git a/airflow/api_connexion/endpoints/provider_endpoint.py b/airflow/api_connexion/endpoints/provider_endpoint.py index d9ba0c819b702..1eb032dee030e 100644 --- a/airflow/api_connexion/endpoints/provider_endpoint.py +++ b/airflow/api_connexion/endpoints/provider_endpoint.py @@ -28,6 +28,7 @@ ) from airflow.auth.managers.models.resource_details import AccessView from airflow.providers_manager import ProvidersManager +from airflow.utils.api_migration import mark_fastapi_migration_done if TYPE_CHECKING: from airflow.api_connexion.types import APIResponse @@ -46,6 +47,7 @@ def _provider_mapper(provider: ProviderInfo) -> Provider: ) +@mark_fastapi_migration_done @security.requires_access_view(AccessView.PROVIDERS) def get_providers() -> APIResponse: """Get providers.""" diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index 0e98173f68a5f..b862ed1469840 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -52,6 +52,7 @@ from airflow.models.taskinstance import TaskInstance as TI, clear_task_instances from airflow.models.taskinstancehistory import TaskInstanceHistory as TIH from airflow.utils.airflow_flask_app import get_airflow_app +from airflow.utils.api_migration import mark_fastapi_migration_done from airflow.utils.db import get_query_count from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.state import DagRunState, TaskInstanceState @@ -69,6 +70,7 @@ T = TypeVar("T") +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE) @provide_session def get_task_instance( @@ -102,6 +104,7 @@ def get_task_instance( return task_instance_schema.dump(task_instance) +@mark_fastapi_migration_done @security.requires_access_dag("GET", DagAccessEntity.TASK_INSTANCE) @provide_session def get_mapped_task_instance( @@ -425,6 +428,7 @@ def 
get_task_instances_batch(session: Session = NEW_SESSION) -> APIResponse: except _UnsupportedOrderBy as e: raise BadRequest(detail=f"Ordering with {e.order_by!r} is not supported") + ti_query = ti_query.offset(data["page_offset"]).limit(data["page_limit"]) task_instances = session.scalars(ti_query) return task_instance_collection_schema.dump( @@ -533,9 +537,11 @@ def post_set_task_instances_state(*, dag_id: str, session: Session = NEW_SESSION detail=f"Task instance not found for task {task_id!r} on execution_date {execution_date}" ) - if run_id and not session.get( - TI, {"task_id": task_id, "dag_id": dag_id, "run_id": run_id, "map_index": -1} - ): + select_stmt = select(TI).where( + TI.dag_id == dag_id, TI.task_id == task_id, TI.run_id == run_id, TI.map_index == -1 + ) + + if run_id and not session.scalars(select_stmt).one_or_none(): error_message = f"Task instance not found for task {task_id!r} on DAG run with ID {run_id!r}" raise NotFound(detail=error_message) @@ -581,10 +587,12 @@ def patch_task_instance( if not dag.has_task(task_id): raise NotFound("Task not found", detail=f"Task {task_id!r} not found in DAG {dag_id!r}") - ti: TI | None = session.get( - TI, {"task_id": task_id, "dag_id": dag_id, "run_id": dag_run_id, "map_index": map_index} + select_stmt = select(TI).where( + TI.dag_id == dag_id, TI.task_id == task_id, TI.run_id == dag_run_id, TI.map_index == map_index ) + ti: TI | None = session.scalars(select_stmt).one_or_none() + if not ti: error_message = f"Task instance not found for task {task_id!r} on DAG run with ID {dag_run_id!r}" raise NotFound(detail=error_message) diff --git a/airflow/api_connexion/endpoints/variable_endpoint.py b/airflow/api_connexion/endpoints/variable_endpoint.py index 20e7ce1edeabe..00f5abf00bbaa 100644 --- a/airflow/api_connexion/endpoints/variable_endpoint.py +++ b/airflow/api_connexion/endpoints/variable_endpoint.py @@ -70,6 +70,7 @@ def get_variable(*, variable_key: str, session: Session = NEW_SESSION) -> Respon return variable_schema.dump(var) +@mark_fastapi_migration_done @security.requires_access_variable("GET") @format_parameters({"limit": check_limit}) @provide_session diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml index e99f91639c49e..c884c15954116 100644 --- a/airflow/api_connexion/openapi/v1.yaml +++ b/airflow/api_connexion/openapi/v1.yaml @@ -245,155 +245,6 @@ servers: description: Apache Airflow Stable API. paths: - # Database entities - /backfills: - get: - summary: List backfills - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: list_backfills - tags: [Backfill] - parameters: - - name: dag_id - in: query - schema: - type: string - required: true - description: | - List backfills for this dag. - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/BackfillCollection" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - - post: - summary: Create a backfill job. - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: create_backfill - tags: [Backfill] - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - responses: - "200": - description: Success. 
- content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "400": - $ref: "#/components/responses/BadRequest" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - - /backfills/{backfill_id}: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - get: - summary: Get a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: get_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - - /backfills/{backfill_id}/pause: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Pause a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: pause_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - - /backfills/{backfill_id}/unpause: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Pause a backfill - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: unpause_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - - /backfills/{backfill_id}/cancel: - parameters: - - $ref: "#/components/parameters/BackfillIdPath" - post: - summary: Cancel a backfill - description: | - When a backfill is cancelled, all queued dag runs will be marked as failed. - Running dag runs will be allowed to continue. - x-openapi-router-controller: airflow.api_connexion.endpoints.backfill_endpoint - operationId: cancel_backfill - tags: [Backfill] - responses: - "200": - description: Success. - content: - application/json: - schema: - $ref: "#/components/schemas/Backfill" - "401": - $ref: "#/components/responses/Unauthenticated" - "403": - $ref: "#/components/responses/PermissionDenied" - "404": - $ref: "#/components/responses/NotFound" - "409": - $ref: "#/components/responses/Conflict" - # Database entities /connections: get: @@ -1146,6 +997,7 @@ paths: Get asset for a dag run. *New in version 2.4.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/dags/{dag_id}/dagRuns/{dag_run_id}/upstreamDatasetEvents" x-openapi-router-controller: airflow.api_connexion.endpoints.dag_run_endpoint operationId: get_upstream_asset_events tags: [DAGRun, Asset] @@ -1211,6 +1063,7 @@ paths: Get a queued asset event for a DAG. *New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/dags/{dag_id}/datasets/queuedEvent/{uri}" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_dag_asset_queued_event parameters: @@ -1236,6 +1089,7 @@ paths: Delete a queued Asset event for a DAG. 
*New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/dags/{dag_id}/datasets/queuedEvent/{uri}" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: delete_dag_asset_queued_event parameters: @@ -1263,6 +1117,7 @@ paths: Get queued Asset events for a DAG. *New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/dags/{dag_id}/datasets/queuedEvent" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_dag_asset_queued_events parameters: @@ -1288,6 +1143,7 @@ paths: Delete queued Asset events for a DAG. *New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/dags/{dag_id}/datasets/queuedEvent" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: delete_dag_asset_queued_events parameters: @@ -1336,6 +1192,7 @@ paths: Get queued Asset events for an Asset *New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/assets/queuedEvent/{uri}" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_asset_queued_events parameters: @@ -1358,9 +1215,10 @@ paths: delete: summary: Delete queued Asset events for an Asset. description: | - Delete queued Asset events for a Asset. + Delete queued Asset events for an Asset. *New in version 2.9.0* + *Changed in 3.0.0*: The endpoint value was renamed from "/assets/queuedEvent/{uri}" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: delete_asset_queued_events parameters: @@ -2480,6 +2338,8 @@ paths: x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_assets tags: [Asset] + description: | + *Changed in 3.0.0*: The endpoint value was renamed from "/datasets" parameters: - $ref: "#/components/parameters/PageLimit" - $ref: "#/components/parameters/PageOffset" @@ -2517,7 +2377,10 @@ paths: - $ref: "#/components/parameters/AssetURI" get: summary: Get an asset - description: Get an asset by uri. + description: | + Get an asset by uri. + + *Changed in 3.0.0*: The endpoint value was renamed from "/datasets/{uri}" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_asset tags: [Asset] @@ -2538,7 +2401,10 @@ paths: /assets/events: get: summary: Get asset events - description: Get asset events + description: | + Get asset events + + *Changed in 3.0.0*: The endpoint value was renamed from "/datasets/events" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: get_asset_events tags: [Asset] @@ -2566,7 +2432,10 @@ paths: $ref: "#/components/responses/NotFound" post: summary: Create asset event - description: Create asset event + description: | + Create asset event + + *Changed in 3.0.0*: The endpoint value was renamed from "/datasets/events" x-openapi-router-controller: airflow.api_connexion.endpoints.asset_endpoint operationId: create_asset_event tags: [Asset] @@ -2884,6 +2753,16 @@ components: type: boolean nullable: true description: is_paused + reprocess_behavior: + type: string + default: none + enum: + - none + - failed + - completed + description: | + Controls whether new runs will be created when there's an existing run + for a given logical date. max_active_runs: type: integer nullable: true @@ -3051,14 +2930,6 @@ components: Time when the DAG last received a refresh signal (e.g. 
the DAG's "refresh" button was clicked in the web UI) - *New in version 2.3.0* - scheduler_lock: - type: boolean - readOnly: true - nullable: true - description: | - Whether (one of) the scheduler is scheduling this DAG at the moment - *New in version 2.3.0* pickle_id: type: string @@ -3299,7 +3170,9 @@ components: - backfill - manual - scheduled - - dataset_triggered + - asset_triggered + description: | + *Changed in 3.0.0*: The asset_triggered value was renamed from dataset_triggered. state: $ref: "#/components/schemas/DagState" external_trigger: @@ -4088,9 +3961,12 @@ components: dag_run_timeout: $ref: "#/components/schemas/TimeDelta" nullable: true - dataset_expression: + asset_expression: type: object - description: Nested asset any/all conditions + description: | + Nested asset any/all conditions + + *Changed in 3.0.0*: The asset_expression value was renamed from dataset_expression. nullable: true doc_md: type: string @@ -4475,6 +4351,7 @@ components: An asset item. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from Dataset. type: object properties: id: @@ -4510,6 +4387,7 @@ components: An asset reference to an upstream task. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from TaskOutletDatasetReference. type: object properties: dag_id: @@ -4534,6 +4412,7 @@ components: An asset reference to a downstream DAG. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from DagScheduleDatasetReference. type: object properties: dag_id: @@ -4554,6 +4433,7 @@ components: A collection of assets. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from DatasetCollection. type: object allOf: - type: object @@ -4562,6 +4442,8 @@ components: type: array items: $ref: "#/components/schemas/Asset" + description: | + *Changed in 3.0.0*: This was renamed from datasets. - $ref: "#/components/schemas/CollectionInfo" AssetEvent: @@ -4569,14 +4451,21 @@ components: An asset event. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from DatasetEvent. type: object properties: - dataset_id: + asset_id: type: integer - description: The asset id - dataset_uri: + description: | + The asset id + + *Changed in 3.0.0*: This was renamed from dataset_id. + asset_uri: type: string - description: The URI of the asset + description: | + The URI of the asset + + *Changed in 3.0.0*: This was renamed from dataset_uri. nullable: false extra: type: object @@ -4611,10 +4500,15 @@ components: type: object required: - asset_uri + description: | + *Changed in 3.0.0*: This was renamed from CreateDatasetEvent. properties: asset_uri: type: string - description: The URI of the asset + description: | + The URI of the asset + + *Changed in 3.0.0*: This was renamed from dataset_uri. nullable: false extra: type: object @@ -4705,12 +4599,15 @@ components: A collection of asset events. *New in version 2.4.0* + *Changed in 3.0.0*: This was renamed from DatasetEventCollection. type: object allOf: - type: object properties: asset_events: type: array + description: | + *Changed in 3.0.0*: This was renamed from dataset_events. items: $ref: "#/components/schemas/AssetEvent" - $ref: "#/components/schemas/CollectionInfo" @@ -4994,6 +4891,15 @@ components: ListTaskInstanceForm: type: object properties: + page_offset: + type: integer + minimum: 0 + description: The number of items to skip before starting to collect the result set. + page_limit: + type: integer + minimum: 1 + default: 100 + description: The numbers of items to return. 
dag_ids: type: array items: @@ -5515,7 +5421,10 @@ components: type: string format: path required: true - description: The encoded Asset URI + description: | + The encoded Asset URI + + *Changed in 3.0.0*: This was renamed from DatasetURI. PoolName: in: path @@ -5701,7 +5610,10 @@ components: name: asset_id schema: type: integer - description: The Asset ID that updated the asset. + description: | + The Asset ID that updated the asset. + + *Changed in 3.0.0*: This was renamed from FilterDatasetID. FilterSourceDAGID: in: query diff --git a/airflow/api_connexion/schemas/asset_schema.py b/airflow/api_connexion/schemas/asset_schema.py index 662f73a50d8b9..7f84b799d1a77 100644 --- a/airflow/api_connexion/schemas/asset_schema.py +++ b/airflow/api_connexion/schemas/asset_schema.py @@ -136,8 +136,8 @@ class Meta: model = AssetEvent id = auto_field() - dataset_id = auto_field() - dataset_uri = fields.String(attribute="dataset.uri", dump_only=True) + asset_id = auto_field() + asset_uri = fields.String(attribute="asset.uri", dump_only=True) extra = JsonObjectField() source_task_id = auto_field() source_dag_id = auto_field() diff --git a/airflow/api_connexion/schemas/backfill_schema.py b/airflow/api_connexion/schemas/backfill_schema.py deleted file mode 100644 index db496bf1ac5d5..0000000000000 --- a/airflow/api_connexion/schemas/backfill_schema.py +++ /dev/null @@ -1,79 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -from typing import NamedTuple - -from marshmallow import Schema, fields -from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field - -from airflow.models.backfill import Backfill, BackfillDagRun - - -class BackfillSchema(SQLAlchemySchema): - """Backfill Schema.""" - - class Meta: - """Meta.""" - - model = Backfill - - id = auto_field(dump_only=True) - dag_id = auto_field() - from_date = auto_field() - to_date = auto_field() - dag_run_conf = fields.Dict(allow_none=True) - reverse = fields.Boolean() - is_paused = auto_field() - max_active_runs = auto_field() - created_at = auto_field() - completed_at = auto_field() - updated_at = auto_field() - - -class BackfillDagRunSchema(SQLAlchemySchema): - """Trigger Schema.""" - - class Meta: - """Meta.""" - - model = BackfillDagRun - - id = auto_field(dump_only=True) - backfill_id = auto_field(dump_only=True) - dag_run_id = auto_field(dump_only=True) - sort_ordinal = auto_field(dump_only=True) - - -class BackfillCollection(NamedTuple): - """List of Backfills with meta.""" - - backfills: list[Backfill] - total_entries: int - - -class BackfillCollectionSchema(Schema): - """Backfill Collection Schema.""" - - backfills = fields.List(fields.Nested(BackfillSchema)) - total_entries = fields.Int() - - -backfill_schema = BackfillSchema() -backfill_dag_run_schema = BackfillDagRunSchema() -backfill_collection_schema = BackfillCollectionSchema() diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py index 6c7ff6fdc30e0..f22812abd1114 100644 --- a/airflow/api_connexion/schemas/dag_schema.py +++ b/airflow/api_connexion/schemas/dag_schema.py @@ -56,8 +56,6 @@ class Meta: last_parsed_time = auto_field(dump_only=True) last_pickled = auto_field(dump_only=True) last_expired = auto_field(dump_only=True) - scheduler_lock = auto_field(dump_only=True) - pickle_id = auto_field(dump_only=True) default_view = auto_field(dump_only=True) fileloc = auto_field(dump_only=True) file_token = fields.Method("get_token", dump_only=True) @@ -98,7 +96,7 @@ class DAGDetailSchema(DAGSchema): catchup = fields.Boolean(dump_only=True) orientation = fields.String(dump_only=True) max_active_tasks = fields.Integer(dump_only=True) - dataset_expression = fields.Dict(allow_none=True) + asset_expression = fields.Dict(allow_none=True) start_date = fields.DateTime(dump_only=True) dag_run_timeout = fields.Nested(TimeDeltaSchema, attribute="dagrun_timeout", dump_only=True) doc_md = fields.String(dump_only=True) diff --git a/airflow/api_fastapi/app.py b/airflow/api_fastapi/app.py index 9c3e5a3bd7983..43885724564c1 100644 --- a/airflow/api_fastapi/app.py +++ b/airflow/api_fastapi/app.py @@ -19,9 +19,8 @@ import logging from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware -from airflow.api_fastapi.core_api.app import init_dag_bag, init_plugins, init_views +from airflow.api_fastapi.core_api.app import init_config, init_dag_bag, init_plugins, init_views from airflow.api_fastapi.execution_api.app import create_task_execution_api_app log = logging.getLogger(__name__) @@ -30,8 +29,6 @@ def create_app(apps: str = "all") -> FastAPI: - from airflow.configuration import conf - apps_list = apps.split(",") if apps else ["all"] app = FastAPI( @@ -50,18 +47,7 @@ def create_app(apps: str = "all") -> FastAPI: task_exec_api_app = create_task_execution_api_app(app) app.mount("/execution", task_exec_api_app) - allow_origins = conf.getlist("api", "access_control_allow_origins") - allow_methods = 
conf.getlist("api", "access_control_allow_methods") - allow_headers = conf.getlist("api", "access_control_allow_headers") - - if allow_origins or allow_methods or allow_headers: - app.add_middleware( - CORSMiddleware, - allow_origins=allow_origins, - allow_credentials=True, - allow_methods=allow_methods, - allow_headers=allow_headers, - ) + init_config(app) return app diff --git a/airflow/api_fastapi/common/parameters.py b/airflow/api_fastapi/common/parameters.py index 4aa8335905ca0..bd65017637227 100644 --- a/airflow/api_fastapi/common/parameters.py +++ b/airflow/api_fastapi/common/parameters.py @@ -31,6 +31,7 @@ from airflow.models import Base, Connection from airflow.models.dag import DagModel, DagTag from airflow.models.dagrun import DagRun +from airflow.models.dagwarning import DagWarning, DagWarningType from airflow.utils import timezone from airflow.utils.state import DagRunState @@ -197,9 +198,9 @@ def to_orm(self, select: Select) -> Select: primary_key_column = self.get_primary_key_column() if self.value[0] == "-": - return select.order_by(nullscheck, column.desc(), primary_key_column) + return select.order_by(nullscheck, column.desc(), primary_key_column.desc()) else: - return select.order_by(nullscheck, column.asc(), primary_key_column) + return select.order_by(nullscheck, column.asc(), primary_key_column.asc()) def get_primary_key_column(self) -> Column: """Get the primary key column of the model of SortParam object.""" @@ -265,6 +266,17 @@ def depends(self, last_dag_run_state: DagRunState | None = None) -> _LastDagRunS return self.set_value(last_dag_run_state) +class _DagTagNamePatternSearch(_SearchParam): + """Search on dag_tag.name.""" + + def __init__(self, skip_none: bool = True) -> None: + super().__init__(DagTag.name, skip_none) + + def depends(self, tag_name_pattern: str | None = None) -> _DagTagNamePatternSearch: + tag_name_pattern = super().transform_aliases(tag_name_pattern) + return self.set_value(tag_name_pattern) + + def _safe_parse_datetime(date_to_check: str) -> datetime: """ Parse datetime and raise error for invalid dates. 
@@ -281,6 +293,34 @@ def _safe_parse_datetime(date_to_check: str) -> datetime: ) +class _WarningTypeFilter(BaseParam[str]): + """Filter on warning type.""" + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(DagWarning.warning_type == self.value) + + def depends(self, warning_type: DagWarningType | None = None) -> _WarningTypeFilter: + return self.set_value(warning_type) + + +class _DagIdFilter(BaseParam[str]): + """Filter on dag_id.""" + + def __init__(self, attribute: ColumnElement, skip_none: bool = True) -> None: + super().__init__(skip_none) + self.attribute = attribute + + def to_orm(self, select: Select) -> Select: + if self.value is None and self.skip_none: + return select + return select.where(self.attribute == self.value) + + def depends(self, dag_id: str | None = None) -> _DagIdFilter: + return self.set_value(dag_id) + + # Common Safe DateTime DateTimeQuery = Annotated[str, AfterValidator(_safe_parse_datetime)] # DAG @@ -299,3 +339,8 @@ def _safe_parse_datetime(date_to_check: str) -> datetime: QueryOwnersFilter = Annotated[_OwnersFilter, Depends(_OwnersFilter().depends)] # DagRun QueryLastDagRunStateFilter = Annotated[_LastDagRunStateFilter, Depends(_LastDagRunStateFilter().depends)] +# DAGWarning +QueryDagIdInDagWarningFilter = Annotated[_DagIdFilter, Depends(_DagIdFilter(DagWarning.dag_id).depends)] +QueryWarningTypeFilter = Annotated[_WarningTypeFilter, Depends(_WarningTypeFilter().depends)] +# DAGTags +QueryDagTagPatternSearch = Annotated[_DagTagNamePatternSearch, Depends(_DagTagNamePatternSearch().depends)] diff --git a/airflow/api_fastapi/core_api/app.py b/airflow/api_fastapi/core_api/app.py index f47a33227f366..47927ad6fbbb0 100644 --- a/airflow/api_fastapi/core_api/app.py +++ b/airflow/api_fastapi/core_api/app.py @@ -22,6 +22,7 @@ from typing import cast from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware from starlette.requests import Request from starlette.responses import HTMLResponse from starlette.staticfiles import StaticFiles @@ -94,3 +95,22 @@ def init_plugins(app: FastAPI) -> None: log.debug("Adding subapplication %s under prefix %s", name, url_prefix) app.mount(url_prefix, subapp) + + +def init_config(app: FastAPI) -> None: + from airflow.configuration import conf + + allow_origins = conf.getlist("api", "access_control_allow_origins") + allow_methods = conf.getlist("api", "access_control_allow_methods") + allow_headers = conf.getlist("api", "access_control_allow_headers") + + if allow_origins or allow_methods or allow_headers: + app.add_middleware( + CORSMiddleware, + allow_origins=allow_origins, + allow_credentials=True, + allow_methods=allow_methods, + allow_headers=allow_headers, + ) + + app.state.secret_key = conf.get("webserver", "secret_key") diff --git a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml index d7ee6340c6680..a82e34a3f5ee3 100644 --- a/airflow/api_fastapi/core_api/openapi/v1-generated.yaml +++ b/airflow/api_fastapi/core_api/openapi/v1-generated.yaml @@ -73,14 +73,21 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/: + /ui/dags/recent_dag_runs: get: tags: - - DAG - summary: Get Dags - description: Get all DAGs. - operationId: get_dags + - Dags + summary: Recent Dag Runs + description: Get recent DAG runs. 
+ operationId: recent_dag_runs parameters: + - name: dag_runs_limit + in: query + required: false + schema: + type: integer + default: 10 + title: Dag Runs Limit - name: limit in: query required: false @@ -150,43 +157,32 @@ paths: - $ref: '#/components/schemas/DagRunState' - type: 'null' title: Last Dag Run State - - name: order_by - in: query - required: false - schema: - type: string - default: dag_id - title: Order By responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/DAGCollectionResponse' + $ref: '#/components/schemas/DAGWithLatestDagRunsCollectionResponse' '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - patch: + /public/backfills/: + get: tags: - - DAG - summary: Patch Dags - description: Patch multiple DAGs. - operationId: patch_dags + - Backfill + summary: List Backfills + operationId: list_backfills parameters: - - name: update_mask + - name: dag_id in: query - required: false + required: true schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask + type: string + title: Dag Id - name: limit in: query required: false @@ -201,72 +197,19 @@ paths: type: integer default: 0 title: Offset - - name: tags - in: query - required: false - schema: - type: array - items: - type: string - title: Tags - - name: owners - in: query - required: false - schema: - type: array - items: - type: string - title: Owners - - name: dag_id_pattern - in: query - required: false - schema: - anyOf: - - type: string - - type: 'null' - title: Dag Id Pattern - - name: only_active - in: query - required: false - schema: - type: boolean - default: true - title: Only Active - - name: paused - in: query - required: false - schema: - anyOf: - - type: boolean - - type: 'null' - title: Paused - - name: last_dag_run_state + - name: order_by in: query required: false schema: - anyOf: - - $ref: '#/components/schemas/DagRunState' - - type: 'null' - title: Last Dag Run State - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGPatchBody' + type: string + default: id + title: Order By responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGCollectionResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -279,45 +222,29 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/dags/{dag_id}: - get: + post: tags: - - DAG - summary: Get Dag - description: Get basic information about a DAG. 
- operationId: get_dag - parameters: - - name: dag_id - in: path + - Backfill + summary: Create Backfill + operationId: create_backfill + requestBody: required: true - schema: - type: string - title: Dag Id + content: + application/json: + schema: + $ref: '#/components/schemas/BackfillPostBody' responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -336,54 +263,37 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '422': + '409': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity - patch: + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/backfills/{backfill_id}: + get: tags: - - DAG - summary: Patch Dag - description: Patch the specific DAG. - operationId: patch_dag + - Backfill + summary: Get Backfill + operationId: get_backfill parameters: - - name: dag_id + - name: backfill_id in: path required: true schema: type: string - title: Dag Id - - name: update_mask - in: query - required: false - schema: - anyOf: - - type: array - items: - type: string - - type: 'null' - title: Update Mask - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/DAGPatchBody' + title: Backfill Id responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -408,31 +318,24 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - delete: + /public/backfills/{backfill_id}/pause: + put: tags: - - DAG - summary: Delete Dag - description: Delete the specific DAG. - operationId: delete_dag + - Backfill + summary: Pause Backfill + operationId: pause_backfill parameters: - - name: dag_id + - name: backfill_id in: path required: true schema: - type: string - title: Dag Id + title: Backfill Id responses: '200': description: Successful Response content: application/json: schema: {} - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request '401': content: application/json: @@ -451,39 +354,36 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '422': + '409': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity - /public/dags/{dag_id}/details: - get: + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/backfills/{backfill_id}/unpause: + put: tags: - - DAG - summary: Get Dag Details - description: Get details of DAG. 
- operationId: get_dag_details + - Backfill + summary: Unpause Backfill + operationId: unpause_backfill parameters: - - name: dag_id + - name: backfill_id in: path required: true schema: - type: string - title: Dag Id + title: Backfill Id responses: '200': description: Successful Response content: application/json: - schema: - $ref: '#/components/schemas/DAGDetailsResponse' - '400': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Bad Request + schema: {} '401': content: application/json: @@ -502,29 +402,36 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found - '422': + '409': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unprocessable Entity - /public/connections/{connection_id}: - delete: + description: Conflict + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/backfills/{backfill_id}/cancel: + put: tags: - - Connection - summary: Delete Connection - description: Delete a connection entry. - operationId: delete_connection + - Backfill + summary: Cancel Backfill + operationId: cancel_backfill parameters: - - name: connection_id + - name: backfill_id in: path required: true schema: - type: string - title: Connection Id + title: Backfill Id responses: - '204': + '200': description: Successful Response + content: + application/json: + schema: {} '401': content: application/json: @@ -543,64 +450,132 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Conflict '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/: get: tags: - - Connection - summary: Get Connection - description: Get a connection entry. - operationId: get_connection + - DAG + summary: Get Dags + description: Get all DAGs. 
+ operationId: get_dags parameters: - - name: connection_id - in: path - required: true + - name: limit + in: query + required: false schema: - type: string - title: Connection Id - responses: - '200': + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: tags + in: query + required: false + schema: + type: array + items: + type: string + title: Tags + - name: owners + in: query + required: false + schema: + type: array + items: + type: string + title: Owners + - name: dag_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id Pattern + - name: dag_display_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Display Name Pattern + - name: only_active + in: query + required: false + schema: + type: boolean + default: true + title: Only Active + - name: paused + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Paused + - name: last_dag_run_state + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagRunState' + - type: 'null' + title: Last Dag Run State + - name: order_by + in: query + required: false + schema: + type: string + default: dag_id + title: Order By + responses: + '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/ConnectionResponse' - '401': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Unauthorized - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found + $ref: '#/components/schemas/DAGCollectionResponse' '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/connections/: - get: + patch: tags: - - Connection - summary: Get Connections - description: Get all connection entries. - operationId: get_connections + - DAG + summary: Patch Dags + description: Patch multiple DAGs. 
+ operationId: patch_dags parameters: + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask - name: limit in: query required: false @@ -615,20 +590,72 @@ paths: type: integer default: 0 title: Offset - - name: order_by + - name: tags in: query required: false schema: - type: string - default: id - title: Order By + type: array + items: + type: string + title: Tags + - name: owners + in: query + required: false + schema: + type: array + items: + type: string + title: Owners + - name: dag_id_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id Pattern + - name: only_active + in: query + required: false + schema: + type: boolean + default: true + title: Only Active + - name: paused + in: query + required: false + schema: + anyOf: + - type: boolean + - type: 'null' + title: Paused + - name: last_dag_run_state + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagRunState' + - type: 'null' + title: Last Dag Run State + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGPatchBody' responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/ConnectionCollectionResponse' + $ref: '#/components/schemas/DAGCollectionResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request '401': content: application/json: @@ -653,23 +680,50 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/variables/{variable_key}: - delete: + /public/dags/tags: + get: tags: - - Variable - summary: Delete Variable - description: Delete a variable entry. - operationId: delete_variable + - DAG + summary: Get Dag Tags + description: Get all DAG tags. + operationId: get_dag_tags parameters: - - name: variable_key - in: path - required: true + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false schema: type: string - title: Variable Key + default: name + title: Order By + - name: tag_name_pattern + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Tag Name Pattern responses: - '204': + '200': description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGTagCollectionResponse' '401': content: application/json: @@ -682,38 +736,39 @@ paths: schema: $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/HTTPExceptionResponse' - description: Not Found '422': description: Validation Error content: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}: get: tags: - - Variable - summary: Get Variable - description: Get a variable entry. - operationId: get_variable + - DAG + summary: Get Dag + description: Get basic information about a DAG. 
+ operationId: get_dag parameters: - - name: variable_key + - name: dag_id in: path required: true schema: type: string - title: Variable Key + title: Dag Id responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/VariableResponse' + $ref: '#/components/schemas/DAGResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request '401': content: application/json: @@ -733,24 +788,24 @@ paths: $ref: '#/components/schemas/HTTPExceptionResponse' description: Not Found '422': - description: Validation Error content: application/json: schema: - $ref: '#/components/schemas/HTTPValidationError' + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity patch: tags: - - Variable - summary: Patch Variable - description: Update a variable by key. - operationId: patch_variable - parameters: - - name: variable_key - in: path + - DAG + summary: Patch Dag + description: Patch the specific DAG. + operationId: patch_dag + parameters: + - name: dag_id + in: path required: true schema: type: string - title: Variable Key + title: Dag Id - name: update_mask in: query required: false @@ -766,14 +821,14 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/VariableBody' + $ref: '#/components/schemas/DAGPatchBody' responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/VariableResponse' + $ref: '#/components/schemas/DAGResponse' '400': content: application/json: @@ -804,38 +859,245 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/variables/: - post: + delete: tags: - - Variable - summary: Post Variable - description: Create a variable. - operationId: post_variable - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/VariableBody' + - DAG + summary: Delete Dag + description: Delete the specific DAG. + operationId: delete_dag + parameters: + - name: dag_id + in: path required: true + schema: + type: string + title: Dag Id responses: - '201': + '200': description: Successful Response + content: + application/json: + schema: {} + '400': content: application/json: schema: - $ref: '#/components/schemas/VariableResponse' + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + /public/dags/{dag_id}/details: + get: + tags: + - DAG + summary: Get Dag Details + description: Get details of DAG. 
+ operationId: get_dag_details + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGDetailsResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unprocessable Entity + /public/connections/{connection_id}: + delete: + tags: + - Connection + summary: Delete Connection + description: Delete a connection entry. + operationId: delete_connection + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Connection + summary: Get Connection + description: Get a connection entry. + operationId: get_connection + parameters: + - name: connection_id + in: path + required: true + schema: + type: string + title: Connection Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionResponse' '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/connections/: + get: + tags: + - Connection + summary: Get Connections + description: Get all connection entries. 
+ operationId: get_connections + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectionCollectionResponse' + '401': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' description: Forbidden + '404': content: application/json: schema: $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found '422': description: Validation Error content: @@ -944,21 +1206,1056 @@ paths: application/json: schema: $ref: '#/components/schemas/HTTPValidationError' - /public/monitor/health: - get: + patch: tags: - - Monitor - summary: Get Health - operationId: get_health - responses: - '200': - description: Successful Response - content: + - DagRun + summary: Patch Dag Run State + description: Modify a DAG Run. + operationId: patch_dag_run_state + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGRunResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dagSources/{file_token}: + get: + tags: + - DagSource + summary: Get Dag Source + description: Get source code using file token. 
+ operationId: get_dag_source + parameters: + - name: file_token + in: path + required: true + schema: + type: string + title: File Token + - name: accept + in: header + required: false + schema: + type: string + default: '*/*' + title: Accept + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGSourceResponse' + text/plain: + schema: + type: string + example: dag code + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '406': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Acceptable + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/eventLogs/{event_log_id}: + get: + tags: + - Event Log + summary: Get Event Log + operationId: get_event_log + parameters: + - name: event_log_id + in: path + required: true + schema: + type: integer + title: Event Log Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/EventLogResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/monitor/health: + get: + tags: + - Monitor + summary: Get Health + operationId: get_health + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/HealthInfoSchema' + /public/dagWarnings: + get: + tags: + - DagWarning + summary: List Dag Warnings + description: Get a list of DAG warnings. 
+ operationId: list_dag_warnings + parameters: + - name: dag_id + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Dag Id + - name: warning_type + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/DagWarningType' + - type: 'null' + title: Warning Type + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: dag_id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/DAGWarningCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/plugins/: + get: + tags: + - Plugin + summary: Get Plugins + operationId: get_plugins + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PluginCollectionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/pools/{pool_name}: + delete: + tags: + - Pool + summary: Delete Pool + description: Delete a pool entry. + operationId: delete_pool + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + responses: + '204': + description: Successful Response + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Pool + summary: Get Pool + description: Get a pool. 
+ operationId: get_pool + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Pool + summary: Patch Pool + description: Update a Pool. + operationId: patch_pool + parameters: + - name: pool_name + in: path + required: true + schema: + type: string + title: Pool Name + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PoolPatchBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/pools/: + get: + tags: + - Pool + summary: Get Pools + description: Get all pools entries. + operationId: get_pools + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Pool + summary: Post Pool + description: Create a Pool. 
+ operationId: post_pool + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/PoolPostBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/PoolResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/providers/: + get: + tags: + - Provider + summary: Get Providers + description: Get providers. + operationId: get_providers + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/ProviderCollectionResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}: + get: + tags: + - Task Instance + summary: Get Task Instance + description: Get task instance. + operationId: get_task_instance + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}: + get: + tags: + - Task Instance + summary: Get Mapped Task Instance + description: Get task instance. 
+ operationId: get_mapped_task_instance + parameters: + - name: dag_id + in: path + required: true + schema: + type: string + title: Dag Id + - name: dag_run_id + in: path + required: true + schema: + type: string + title: Dag Run Id + - name: task_id + in: path + required: true + schema: + type: string + title: Task Id + - name: map_index + in: path + required: true + schema: + type: integer + title: Map Index + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/TaskInstanceResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/variables/{variable_key}: + delete: + tags: + - Variable + summary: Delete Variable + description: Delete a variable entry. + operationId: delete_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '204': + description: Successful Response + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + get: + tags: + - Variable + summary: Get Variable + description: Get a variable entry. + operationId: get_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + patch: + tags: + - Variable + summary: Patch Variable + description: Update a variable by key. 
+ operationId: patch_variable + parameters: + - name: variable_key + in: path + required: true + schema: + type: string + title: Variable Key + - name: update_mask + in: query + required: false + schema: + anyOf: + - type: array + items: + type: string + - type: 'null' + title: Update Mask + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Bad Request + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Not Found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/variables/: + get: + tags: + - Variable + summary: Get Variables + description: Get all Variables entries. + operationId: get_variables + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 100 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: order_by + in: query + required: false + schema: + type: string + default: id + title: Order By + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableCollectionResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + post: + tags: + - Variable + summary: Post Variable + description: Create a variable. + operationId: post_variable + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/VariableBody' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VariableResponse' + '401': + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Unauthorized + '403': + content: application/json: schema: - $ref: '#/components/schemas/HealthInfoSchema' + $ref: '#/components/schemas/HTTPExceptionResponse' + description: Forbidden + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /public/version/: + get: + tags: + - Version + summary: Get Version + description: Get version information. 
+ operationId: get_version + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VersionInfo' components: schemas: + AppBuilderMenuItemResponse: + properties: + name: + type: string + title: Name + href: + anyOf: + - type: string + - type: 'null' + title: Href + category: + anyOf: + - type: string + - type: 'null' + title: Category + additionalProperties: true + type: object + required: + - name + title: AppBuilderMenuItemResponse + description: Serializer for AppBuilder Menu Item responses. + AppBuilderViewResponse: + properties: + name: + anyOf: + - type: string + - type: 'null' + title: Name + category: + anyOf: + - type: string + - type: 'null' + title: Category + view: + anyOf: + - type: string + - type: 'null' + title: View + label: + anyOf: + - type: string + - type: 'null' + title: Label + additionalProperties: true + type: object + title: AppBuilderViewResponse + description: Serializer for AppBuilder View responses. + BackfillPostBody: + properties: + dag_id: + type: string + title: Dag Id + from_date: + type: string + format: date-time + title: From Date + to_date: + type: string + format: date-time + title: To Date + run_backwards: + type: boolean + title: Run Backwards + default: false + dag_run_conf: + type: object + title: Dag Run Conf + default: {} + reprocess_behavior: + $ref: '#/components/schemas/ReprocessBehavior' + default: none + max_active_runs: + type: integer + title: Max Active Runs + default: 10 + type: object + required: + - dag_id + - from_date + - to_date + title: BackfillPostBody + description: Object used for create backfill request. BaseInfoSchema: properties: status: @@ -986,7 +2283,7 @@ components: - connections - total_entries title: ConnectionCollectionResponse - description: DAG Collection serializer for responses. + description: Connection Collection serializer for responses. ConnectionResponse: properties: connection_id: @@ -1085,12 +2382,6 @@ components: format: date-time - type: 'null' title: Last Expired - scheduler_lock: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Scheduler Lock pickle_id: anyOf: - type: string @@ -1180,11 +2471,11 @@ components: format: duration - type: 'null' title: Dag Run Timeout - dataset_expression: + asset_expression: anyOf: - type: object - type: 'null' - title: Dataset Expression + title: Asset Expression doc_md: anyOf: - type: string @@ -1207,9 +2498,6 @@ components: - type: boolean - type: 'null' title: Is Paused Upon Creation - orientation: - type: string - title: Orientation params: anyOf: - type: object @@ -1255,7 +2543,6 @@ components: - last_parsed_time - last_pickled - last_expired - - scheduler_lock - pickle_id - default_view - fileloc @@ -1275,12 +2562,11 @@ components: - owners - catchup - dag_run_timeout - - dataset_expression + - asset_expression - doc_md - start_date - end_date - is_paused_upon_creation - - orientation - params - render_template_as_native_obj - template_search_path @@ -1332,12 +2618,6 @@ components: format: date-time - type: 'null' title: Last Expired - scheduler_lock: - anyOf: - - type: string - format: date-time - - type: 'null' - title: Scheduler Lock pickle_id: anyOf: - type: string @@ -1432,7 +2712,6 @@ components: - last_parsed_time - last_pickled - last_expired - - scheduler_lock - pickle_id - default_view - fileloc @@ -1453,6 +2732,23 @@ components: - file_token title: DAGResponse description: DAG serializer for responses. 
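Note (not part of the patch): the BackfillPostBody schema above is the request body for the new POST /public/backfills/ endpoint added later in this diff. A minimal client sketch follows; the base URL, the DAG id, and the dates are placeholders for a local, unauthenticated deployment.

    import requests

    BASE_URL = "http://localhost:8080"  # assumed local API server

    # Fields mirror BackfillPostBody: dag_id, from_date and to_date are required,
    # the remaining fields fall back to the defaults declared in the schema.
    payload = {
        "dag_id": "example_dag",                 # placeholder DAG id
        "from_date": "2024-01-01T00:00:00Z",
        "to_date": "2024-01-07T00:00:00Z",
        "run_backwards": False,
        "dag_run_conf": {},
        "reprocess_behavior": "none",            # one of: failed, completed, none
        "max_active_runs": 10,
    }

    resp = requests.post(f"{BASE_URL}/public/backfills/", json=payload)
    resp.raise_for_status()   # the route returns 409 if a backfill is already running for the DAG
    print(resp.json())        # serialized BackfillResponse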
+ DAGRunPatchBody: + properties: + state: + $ref: '#/components/schemas/DAGRunPatchStates' + type: object + required: + - state + title: DAGRunPatchBody + description: DAG Run Serializer for PATCH requests. + DAGRunPatchStates: + type: string + enum: + - queued + - success + - failed + title: DAGRunPatchStates + description: Enum for DAG Run states when updating a DAG Run. DAGRunResponse: properties: run_id: @@ -1567,17 +2863,251 @@ components: manual: type: integer title: Manual - dataset_triggered: + asset_triggered: type: integer - title: Dataset Triggered + title: Asset Triggered type: object required: - backfill - scheduled - manual - - dataset_triggered + - asset_triggered title: DAGRunTypes description: DAG Run Types for responses. + DAGSourceResponse: + properties: + content: + anyOf: + - type: string + - type: 'null' + title: Content + type: object + required: + - content + title: DAGSourceResponse + description: DAG Source serializer for responses. + DAGTagCollectionResponse: + properties: + tags: + items: + type: string + type: array + title: Tags + total_entries: + type: integer + title: Total Entries + type: object + required: + - tags + - total_entries + title: DAGTagCollectionResponse + description: DAG Tags Collection serializer for responses. + DAGWarningCollectionResponse: + properties: + dag_warnings: + items: + $ref: '#/components/schemas/DAGWarningResponse' + type: array + title: Dag Warnings + total_entries: + type: integer + title: Total Entries + type: object + required: + - dag_warnings + - total_entries + title: DAGWarningCollectionResponse + description: DAG warning collection serializer for responses. + DAGWarningResponse: + properties: + dag_id: + type: string + title: Dag Id + warning_type: + $ref: '#/components/schemas/DagWarningType' + message: + type: string + title: Message + timestamp: + type: string + format: date-time + title: Timestamp + type: object + required: + - dag_id + - warning_type + - message + - timestamp + title: DAGWarningResponse + description: DAG Warning serializer for responses. + DAGWithLatestDagRunsCollectionResponse: + properties: + total_entries: + type: integer + title: Total Entries + dags: + items: + $ref: '#/components/schemas/DAGWithLatestDagRunsResponse' + type: array + title: Dags + type: object + required: + - total_entries + - dags + title: DAGWithLatestDagRunsCollectionResponse + description: DAG with latest dag runs collection response serializer. 
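Note (illustration only): DAGRunPatchBody and DAGRunPatchStates above back the PATCH /public/dags/{dag_id}/dagRuns/{dag_run_id} endpoint defined earlier in this spec. A hedged sketch of marking a run successful; the server address and identifiers are placeholders.

    import requests

    BASE_URL = "http://localhost:8080"               # assumed local API server
    dag_id = "example_dag"                           # placeholder
    run_id = "manual__2024-01-01T00:00:00+00:00"     # placeholder run id

    resp = requests.patch(
        f"{BASE_URL}/public/dags/{dag_id}/dagRuns/{run_id}",
        params={"update_mask": ["state"]},  # optional; only the "state" field may be updated
        json={"state": "success"},          # DAGRunPatchStates: queued | success | failed
    )
    resp.raise_for_status()
    print(resp.json())                      # updated DAGRunResponse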
+ DAGWithLatestDagRunsResponse: + properties: + dag_id: + type: string + title: Dag Id + dag_display_name: + type: string + title: Dag Display Name + is_paused: + type: boolean + title: Is Paused + is_active: + type: boolean + title: Is Active + last_parsed_time: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Parsed Time + last_pickled: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Pickled + last_expired: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Last Expired + pickle_id: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Pickle Id + default_view: + anyOf: + - type: string + - type: 'null' + title: Default View + fileloc: + type: string + title: Fileloc + description: + anyOf: + - type: string + - type: 'null' + title: Description + timetable_summary: + anyOf: + - type: string + - type: 'null' + title: Timetable Summary + timetable_description: + anyOf: + - type: string + - type: 'null' + title: Timetable Description + tags: + items: + $ref: '#/components/schemas/DagTagPydantic' + type: array + title: Tags + max_active_tasks: + type: integer + title: Max Active Tasks + max_active_runs: + anyOf: + - type: integer + - type: 'null' + title: Max Active Runs + max_consecutive_failed_dag_runs: + type: integer + title: Max Consecutive Failed Dag Runs + has_task_concurrency_limits: + type: boolean + title: Has Task Concurrency Limits + has_import_errors: + type: boolean + title: Has Import Errors + next_dagrun: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun + next_dagrun_data_interval_start: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval Start + next_dagrun_data_interval_end: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Data Interval End + next_dagrun_create_after: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Next Dagrun Create After + owners: + items: + type: string + type: array + title: Owners + latest_dag_runs: + items: + $ref: '#/components/schemas/DAGRunResponse' + type: array + title: Latest Dag Runs + file_token: + type: string + title: File Token + description: Return file token. + readOnly: true + type: object + required: + - dag_id + - dag_display_name + - is_paused + - is_active + - last_parsed_time + - last_pickled + - last_expired + - pickle_id + - default_view + - fileloc + - description + - timetable_summary + - timetable_description + - tags + - max_active_tasks + - max_active_runs + - max_consecutive_failed_dag_runs + - has_task_concurrency_limits + - has_import_errors + - next_dagrun + - next_dagrun_data_interval_start + - next_dagrun_data_interval_end + - next_dagrun_create_after + - owners + - latest_dag_runs + - file_token + title: DAGWithLatestDagRunsResponse + description: DAG with latest dag runs response serializer. DagProcessorInfoSchema: properties: status: @@ -1621,7 +3151,7 @@ components: - ui - test - timetable - - dataset + - asset - backfill title: DagRunTriggeredByType description: Class with TriggeredBy types for DagRun. @@ -1631,7 +3161,7 @@ components: - backfill - scheduled - manual - - dataset_triggered + - asset_triggered title: DagRunType description: Class with DagRun types. DagTagPydantic: @@ -1649,6 +3179,105 @@ components: title: DagTagPydantic description: Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API. 
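Note (illustration only): the tag schemas pair with the GET /public/dags/tags endpoint defined earlier; a small sketch of filtering tags by name pattern. The base URL and the pattern value are placeholders.

    import requests

    resp = requests.get(
        "http://localhost:8080/public/dags/tags",    # assumed local API server
        params={"tag_name_pattern": "etl", "order_by": "name", "limit": 50},
    )
    resp.raise_for_status()
    tags = resp.json()                               # DAGTagCollectionResponse
    print(tags["total_entries"], tags["tags"])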
+ DagWarningType: + type: string + enum: + - asset conflict + - non-existent pool + title: DagWarningType + description: 'Enum for DAG warning types. + + + This is the set of allowable values for the ``warning_type`` field + + in the DagWarning model.' + EventLogResponse: + properties: + event_log_id: + type: integer + title: Event Log Id + when: + type: string + format: date-time + title: When + dag_id: + anyOf: + - type: string + - type: 'null' + title: Dag Id + task_id: + anyOf: + - type: string + - type: 'null' + title: Task Id + run_id: + anyOf: + - type: string + - type: 'null' + title: Run Id + map_index: + anyOf: + - type: integer + - type: 'null' + title: Map Index + try_number: + anyOf: + - type: integer + - type: 'null' + title: Try Number + event: + type: string + title: Event + logical_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Logical Date + owner: + anyOf: + - type: string + - type: 'null' + title: Owner + extra: + anyOf: + - type: string + - type: 'null' + title: Extra + type: object + required: + - event_log_id + - when + - dag_id + - task_id + - run_id + - map_index + - try_number + - event + - logical_date + - owner + - extra + title: EventLogResponse + description: Event Log Response. + FastAPIAppResponse: + properties: + app: + type: string + title: App + url_prefix: + type: string + title: Url Prefix + name: + type: string + title: Name + additionalProperties: true + type: object + required: + - app + - url_prefix + - name + title: FastAPIAppResponse + description: Serializer for Plugin FastAPI App responses. HTTPExceptionResponse: properties: detail: @@ -1682,27 +3311,342 @@ components: $ref: '#/components/schemas/DagProcessorInfoSchema' type: object required: - - metadatabase - - scheduler - - triggerer - - dag_processor - title: HealthInfoSchema - description: Schema for the Health endpoint. - HistoricalMetricDataResponse: + - metadatabase + - scheduler + - triggerer + - dag_processor + title: HealthInfoSchema + description: Schema for the Health endpoint. + HistoricalMetricDataResponse: + properties: + dag_run_types: + $ref: '#/components/schemas/DAGRunTypes' + dag_run_states: + $ref: '#/components/schemas/DAGRunStates' + task_instance_states: + $ref: '#/components/schemas/airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState' + type: object + required: + - dag_run_types + - dag_run_states + - task_instance_states + title: HistoricalMetricDataResponse + description: Historical Metric Data serializer for responses. 
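Note (illustration only): DagWarningType above enumerates the values accepted by the warning_type filter on GET /public/dagWarnings. A sketch listing pool-related warnings, assuming a local server without authentication.

    import requests

    resp = requests.get(
        "http://localhost:8080/public/dagWarnings",  # assumed local API server
        params={"warning_type": "non-existent pool", "limit": 100, "order_by": "dag_id"},
    )
    resp.raise_for_status()
    for warning in resp.json()["dag_warnings"]:      # DAGWarningCollectionResponse
        print(warning["dag_id"], warning["message"])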
+ JobResponse: + properties: + id: + type: integer + title: Id + dag_id: + anyOf: + - type: string + - type: 'null' + title: Dag Id + state: + anyOf: + - type: string + - type: 'null' + title: State + job_type: + anyOf: + - type: string + - type: 'null' + title: Job Type + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + latest_heartbeat: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Latest Heartbeat + executor_class: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Executor Class + hostname: + anyOf: + - type: string + - type: 'null' + title: Hostname + unixname: + anyOf: + - type: string + - type: 'null' + title: Unixname + type: object + required: + - id + - dag_id + - state + - job_type + - start_date + - end_date + - latest_heartbeat + - executor_class + - hostname + - unixname + title: JobResponse + description: Job serializer for responses. + PluginCollectionResponse: + properties: + plugins: + items: + $ref: '#/components/schemas/PluginResponse' + type: array + title: Plugins + total_entries: + type: integer + title: Total Entries + type: object + required: + - plugins + - total_entries + title: PluginCollectionResponse + description: Plugin Collection serializer. + PluginResponse: + properties: + name: + type: string + title: Name + macros: + items: + type: string + type: array + title: Macros + flask_blueprints: + items: + type: string + type: array + title: Flask Blueprints + fastapi_apps: + items: + $ref: '#/components/schemas/FastAPIAppResponse' + type: array + title: Fastapi Apps + appbuilder_views: + items: + $ref: '#/components/schemas/AppBuilderViewResponse' + type: array + title: Appbuilder Views + appbuilder_menu_items: + items: + $ref: '#/components/schemas/AppBuilderMenuItemResponse' + type: array + title: Appbuilder Menu Items + global_operator_extra_links: + items: + type: string + type: array + title: Global Operator Extra Links + operator_extra_links: + items: + type: string + type: array + title: Operator Extra Links + source: + type: string + title: Source + ti_deps: + items: + type: string + type: array + title: Ti Deps + listeners: + items: + type: string + type: array + title: Listeners + timetables: + items: + type: string + type: array + title: Timetables + type: object + required: + - name + - macros + - flask_blueprints + - fastapi_apps + - appbuilder_views + - appbuilder_menu_items + - global_operator_extra_links + - operator_extra_links + - source + - ti_deps + - listeners + - timetables + title: PluginResponse + description: Plugin serializer. + PoolCollectionResponse: + properties: + pools: + items: + $ref: '#/components/schemas/PoolResponse' + type: array + title: Pools + total_entries: + type: integer + title: Total Entries + type: object + required: + - pools + - total_entries + title: PoolCollectionResponse + description: Pool Collection serializer for responses. + PoolPatchBody: + properties: + pool: + anyOf: + - type: string + - type: 'null' + title: Pool + slots: + anyOf: + - type: integer + - type: 'null' + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + anyOf: + - type: boolean + - type: 'null' + title: Include Deferred + type: object + title: PoolPatchBody + description: Pool serializer for patch bodies. 
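Note (illustration only): PoolPatchBody above is the request body for PATCH /public/pools/{pool_name}. A sketch resizing a pool; the server address and pool name are placeholders.

    import requests

    resp = requests.patch(
        "http://localhost:8080/public/pools/default_pool",  # assumed server; example pool name
        params={"update_mask": ["slots"]},                   # optional: restrict which fields change
        json={"slots": 256},
    )
    resp.raise_for_status()
    print(resp.json()["open_slots"])                         # PoolResponse, see the schema below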
+ PoolPostBody: + properties: + name: + type: string + title: Name + slots: + type: integer + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + type: boolean + title: Include Deferred + default: false + type: object + required: + - name + - slots + title: PoolPostBody + description: Pool serializer for post bodies. + PoolResponse: + properties: + name: + type: string + title: Name + slots: + type: integer + title: Slots + description: + anyOf: + - type: string + - type: 'null' + title: Description + include_deferred: + type: boolean + title: Include Deferred + occupied_slots: + type: integer + title: Occupied Slots + running_slots: + type: integer + title: Running Slots + queued_slots: + type: integer + title: Queued Slots + scheduled_slots: + type: integer + title: Scheduled Slots + open_slots: + type: integer + title: Open Slots + deferred_slots: + type: integer + title: Deferred Slots + type: object + required: + - name + - slots + - description + - include_deferred + - occupied_slots + - running_slots + - queued_slots + - scheduled_slots + - open_slots + - deferred_slots + title: PoolResponse + description: Pool serializer for responses. + ProviderCollectionResponse: + properties: + providers: + items: + $ref: '#/components/schemas/ProviderResponse' + type: array + title: Providers + total_entries: + type: integer + title: Total Entries + type: object + required: + - providers + - total_entries + title: ProviderCollectionResponse + description: Provider Collection serializer for responses. + ProviderResponse: properties: - dag_run_types: - $ref: '#/components/schemas/DAGRunTypes' - dag_run_states: - $ref: '#/components/schemas/DAGRunStates' - task_instance_states: - $ref: '#/components/schemas/TaskInstanceState' + package_name: + type: string + title: Package Name + description: + type: string + title: Description + version: + type: string + title: Version type: object required: - - dag_run_types - - dag_run_states - - task_instance_states - title: HistoricalMetricDataResponse - description: Historical Metric Data serializer for responses. + - package_name + - description + - version + title: ProviderResponse + description: Provider serializer for responses. + ReprocessBehavior: + type: string + enum: + - failed + - completed + - none + title: ReprocessBehavior + description: 'Internal enum for setting reprocess behavior in a backfill. + + + :meta private:' SchedulerInfoSchema: properties: status: @@ -1721,64 +3665,186 @@ components: - latest_scheduler_heartbeat title: SchedulerInfoSchema description: Schema for Scheduler info. 
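Note (illustration only): PoolPostBody above drives POST /public/pools/, which returns 201 with a PoolResponse per the path definition earlier in this spec. The server address and pool name are placeholders.

    import requests

    resp = requests.post(
        "http://localhost:8080/public/pools/",   # assumed local API server
        json={"name": "ml_training", "slots": 16, "include_deferred": False},  # placeholder pool
    )
    resp.raise_for_status()                       # 201 on success per the spec
    print(resp.json())                            # PoolResponse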
- TaskInstanceState: + TaskInstanceResponse: properties: - no_status: - type: integer - title: No Status - removed: - type: integer - title: Removed - scheduled: - type: integer - title: Scheduled - queued: - type: integer - title: Queued - running: - type: integer - title: Running - success: - type: integer - title: Success - restarting: - type: integer - title: Restarting - failed: - type: integer - title: Failed - up_for_retry: - type: integer - title: Up For Retry - up_for_reschedule: + task_id: + type: string + title: Task Id + dag_id: + type: string + title: Dag Id + dag_run_id: + type: string + title: Dag Run Id + map_index: type: integer - title: Up For Reschedule - upstream_failed: + title: Map Index + logical_date: + type: string + format: date-time + title: Logical Date + start_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Start Date + end_date: + anyOf: + - type: string + format: date-time + - type: 'null' + title: End Date + duration: + anyOf: + - type: number + - type: 'null' + title: Duration + state: + anyOf: + - $ref: '#/components/schemas/airflow__utils__state__TaskInstanceState' + - type: 'null' + try_number: type: integer - title: Upstream Failed - skipped: + title: Try Number + max_tries: type: integer - title: Skipped - deferred: + title: Max Tries + task_display_name: + type: string + title: Task Display Name + hostname: + anyOf: + - type: string + - type: 'null' + title: Hostname + unixname: + anyOf: + - type: string + - type: 'null' + title: Unixname + pool: + type: string + title: Pool + pool_slots: type: integer - title: Deferred + title: Pool Slots + queue: + anyOf: + - type: string + - type: 'null' + title: Queue + priority_weight: + anyOf: + - type: integer + - type: 'null' + title: Priority Weight + operator: + anyOf: + - type: string + - type: 'null' + title: Operator + queued_when: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Queued When + pid: + anyOf: + - type: integer + - type: 'null' + title: Pid + executor: + anyOf: + - type: string + - type: 'null' + title: Executor + executor_config: + type: string + title: Executor Config + note: + anyOf: + - type: string + - type: 'null' + title: Note + rendered_map_index: + anyOf: + - type: string + - type: 'null' + title: Rendered Map Index + rendered_fields: + type: object + title: Rendered Fields + default: {} + trigger: + anyOf: + - $ref: '#/components/schemas/TriggerResponse' + - type: 'null' + triggerer_job: + anyOf: + - $ref: '#/components/schemas/JobResponse' + - type: 'null' type: object required: - - no_status - - removed - - scheduled - - queued - - running - - success - - restarting - - failed - - up_for_retry - - up_for_reschedule - - upstream_failed - - skipped - - deferred - title: TaskInstanceState + - task_id + - dag_id + - dag_run_id + - map_index + - logical_date + - start_date + - end_date + - duration + - state + - try_number + - max_tries + - task_display_name + - hostname + - unixname + - pool + - pool_slots + - queue + - priority_weight + - operator + - queued_when + - pid + - executor + - executor_config + - note + - rendered_map_index + - trigger + - triggerer_job + title: TaskInstanceResponse description: TaskInstance serializer for responses. 
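Note (illustration only): TaskInstanceResponse above is what the task-instance endpoints return. A sketch fetching one mapped task instance; the identifiers and map index are placeholders.

    import requests

    BASE_URL = "http://localhost:8080"  # assumed local API server
    url = (
        f"{BASE_URL}/public/dags/example_dag/dagRuns/"
        "manual__2024-01-01T00:00:00+00:00/taskInstances/example_task/0"  # map_index 0
    )
    resp = requests.get(url)
    resp.raise_for_status()
    ti = resp.json()                     # TaskInstanceResponse
    print(ti["state"], ti["try_number"], ti["duration"])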
+ TriggerResponse: + properties: + id: + type: integer + title: Id + classpath: + type: string + title: Classpath + kwargs: + type: string + title: Kwargs + created_date: + type: string + format: date-time + title: Created Date + triggerer_id: + anyOf: + - type: integer + - type: 'null' + title: Triggerer Id + type: object + required: + - id + - classpath + - kwargs + - created_date + - triggerer_id + title: TriggerResponse + description: Trigger serializer for responses. TriggererInfoSchema: properties: status: @@ -1840,6 +3906,22 @@ components: - value title: VariableBody description: Variable serializer for bodies. + VariableCollectionResponse: + properties: + variables: + items: + $ref: '#/components/schemas/VariableResponse' + type: array + title: Variables + total_entries: + type: integer + title: Total Entries + type: object + required: + - variables + - total_entries + title: VariableCollectionResponse + description: Variable Collection serializer for responses. VariableResponse: properties: key: @@ -1862,3 +3944,97 @@ components: - value title: VariableResponse description: Variable serializer for responses. + VersionInfo: + properties: + version: + type: string + title: Version + git_version: + anyOf: + - type: string + - type: 'null' + title: Git Version + type: object + required: + - version + - git_version + title: VersionInfo + description: Version information serializer for responses. + airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState: + properties: + no_status: + type: integer + title: No Status + removed: + type: integer + title: Removed + scheduled: + type: integer + title: Scheduled + queued: + type: integer + title: Queued + running: + type: integer + title: Running + success: + type: integer + title: Success + restarting: + type: integer + title: Restarting + failed: + type: integer + title: Failed + up_for_retry: + type: integer + title: Up For Retry + up_for_reschedule: + type: integer + title: Up For Reschedule + upstream_failed: + type: integer + title: Upstream Failed + skipped: + type: integer + title: Skipped + deferred: + type: integer + title: Deferred + type: object + required: + - no_status + - removed + - scheduled + - queued + - running + - success + - restarting + - failed + - up_for_retry + - up_for_reschedule + - upstream_failed + - skipped + - deferred + title: TaskInstanceState + description: TaskInstance serializer for responses. + airflow__utils__state__TaskInstanceState: + type: string + enum: + - removed + - scheduled + - queued + - running + - success + - restarting + - failed + - up_for_retry + - up_for_reschedule + - upstream_failed + - skipped + - deferred + title: TaskInstanceState + description: 'All possible states that a Task Instance can be in. + + + Note that None is also allowed, so always use this in a type hint with Optional.' 
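Note (illustration only, before the route implementations below): the filters on the get_dags operation at the top of this spec map directly onto query parameters. A sketch of listing active, unpaused DAGs; the path prefix /public/dags/ is inferred from the router layout in this patch, and the server address and tag name are placeholders.

    import requests

    resp = requests.get(
        "http://localhost:8080/public/dags/",        # assumed local API server
        params={
            "limit": 25,
            "offset": 0,
            "only_active": True,
            "paused": False,
            "tags": ["example"],                     # repeated query parameter
            "order_by": "dag_id",
        },
    )
    resp.raise_for_status()
    body = resp.json()   # DAGCollectionResponse; like the other collection serializers it carries dags + total_entries
    for dag in body["dags"]:
        print(dag["dag_id"], dag["is_paused"])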
diff --git a/airflow/api_fastapi/core_api/routes/public/__init__.py b/airflow/api_fastapi/core_api/routes/public/__init__.py index 3d43a7bbb0efb..a443f5a28ae8c 100644 --- a/airflow/api_fastapi/core_api/routes/public/__init__.py +++ b/airflow/api_fastapi/core_api/routes/public/__init__.py @@ -18,17 +18,35 @@ from __future__ import annotations from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.routes.public.backfills import backfills_router from airflow.api_fastapi.core_api.routes.public.connections import connections_router from airflow.api_fastapi.core_api.routes.public.dag_run import dag_run_router +from airflow.api_fastapi.core_api.routes.public.dag_sources import dag_sources_router +from airflow.api_fastapi.core_api.routes.public.dag_warning import dag_warning_router from airflow.api_fastapi.core_api.routes.public.dags import dags_router +from airflow.api_fastapi.core_api.routes.public.event_logs import event_logs_router from airflow.api_fastapi.core_api.routes.public.monitor import monitor_router +from airflow.api_fastapi.core_api.routes.public.plugins import plugins_router +from airflow.api_fastapi.core_api.routes.public.pools import pools_router +from airflow.api_fastapi.core_api.routes.public.providers import providers_router +from airflow.api_fastapi.core_api.routes.public.task_instances import task_instances_router from airflow.api_fastapi.core_api.routes.public.variables import variables_router +from airflow.api_fastapi.core_api.routes.public.version import version_router public_router = AirflowRouter(prefix="/public") +public_router.include_router(backfills_router) public_router.include_router(dags_router) public_router.include_router(connections_router) -public_router.include_router(variables_router) public_router.include_router(dag_run_router) +public_router.include_router(dag_sources_router) +public_router.include_router(event_logs_router) public_router.include_router(monitor_router) +public_router.include_router(dag_warning_router) +public_router.include_router(plugins_router) +public_router.include_router(pools_router) +public_router.include_router(providers_router) +public_router.include_router(task_instances_router) +public_router.include_router(variables_router) +public_router.include_router(version_router) diff --git a/airflow/api_fastapi/core_api/routes/public/backfills.py b/airflow/api_fastapi/core_api/routes/public/backfills.py new file mode 100644 index 0000000000000..f6fe531d00638 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/backfills.py @@ -0,0 +1,187 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+from __future__ import annotations + +from fastapi import Depends, HTTPException +from sqlalchemy import select, update +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import ( + create_openapi_http_exception_doc, +) +from airflow.api_fastapi.core_api.serializers.backfills import ( + BackfillCollectionResponse, + BackfillPostBody, + BackfillResponse, +) +from airflow.models import DagRun +from airflow.models.backfill import ( + AlreadyRunningBackfill, + Backfill, + BackfillDagRun, + _create_backfill, +) +from airflow.utils import timezone +from airflow.utils.state import DagRunState + +backfills_router = AirflowRouter(tags=["Backfill"], prefix="/backfills") + + +@backfills_router.get( + path="/", + responses=create_openapi_http_exception_doc([401, 403]), +) +async def list_backfills( + dag_id: str, + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends(SortParam(["id"], Backfill).dynamic_depends()), + ], + session: Annotated[Session, Depends(get_session)], +): + select_stmt, total_entries = paginated_select( + select(Backfill).where(Backfill.dag_id == dag_id), + [], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + backfills = session.scalars(select_stmt).all() + + return BackfillCollectionResponse( + backfills=[BackfillResponse.model_validate(x, from_attributes=True) for x in backfills], + total_entries=len(backfills), + ) + + +@backfills_router.get( + path="/{backfill_id}", + responses=create_openapi_http_exception_doc([401, 403, 404]), +) +async def get_backfill( + backfill_id: str, + session: Annotated[Session, Depends(get_session)], +): + backfill = session.get(Backfill, backfill_id) + if backfill: + return BackfillResponse.model_validate(backfill, from_attributes=True) + raise HTTPException(404, "Backfill not found") + + +@backfills_router.put( + path="/{backfill_id}/pause", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def pause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b = session.get(Backfill, backfill_id) + if not b: + raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at: + raise HTTPException(409, "Backfill is already completed.") + if b.is_paused is False: + b.is_paused = True + session.commit() + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.put( + path="/{backfill_id}/unpause", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def unpause_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b = session.get(Backfill, backfill_id) + if not b: + raise HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at: + raise HTTPException(409, "Backfill is already completed.") + if b.is_paused: + b.is_paused = False + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.put( + path="/{backfill_id}/cancel", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def cancel_backfill(*, backfill_id, session: Annotated[Session, Depends(get_session)]): + b: Backfill = session.get(Backfill, backfill_id) + if not b: + raise 
HTTPException(404, f"Could not find backfill with id {backfill_id}") + if b.completed_at is not None: + raise HTTPException(409, "Backfill is already completed.") + + # first, pause, and commit immediately to ensure no other dag runs are started + if not b.is_paused: + b.is_paused = True + session.commit() # ensure no new runs started + + query = ( + update(DagRun) + .where( + DagRun.id.in_( + select( + BackfillDagRun.dag_run_id, + ).where( + BackfillDagRun.backfill_id == b.id, + ), + ), + DagRun.state == DagRunState.QUEUED, + ) + .values(state=DagRunState.FAILED) + .execution_options(synchronize_session=False) + ) + session.execute(query) + session.commit() # this will fail all the queued dag runs in this backfill + + # this is in separate transaction just to avoid potential conflicts + session.refresh(b) + b.completed_at = timezone.utcnow() + return BackfillResponse.model_validate(b, from_attributes=True) + + +@backfills_router.post( + path="/", + responses=create_openapi_http_exception_doc([401, 403, 404, 409]), +) +async def create_backfill( + backfill_request: BackfillPostBody, +): + from_date = timezone.coerce_datetime(backfill_request.from_date) + to_date = timezone.coerce_datetime(backfill_request.to_date) + try: + backfill_obj = _create_backfill( + dag_id=backfill_request.dag_id, + from_date=from_date, + to_date=to_date, + max_active_runs=backfill_request.max_active_runs, + reverse=backfill_request.run_backwards, + dag_run_conf=backfill_request.dag_run_conf, + reprocess_behavior=backfill_request.reprocess_behavior, + ) + return BackfillResponse.model_validate(backfill_obj, from_attributes=True) + except AlreadyRunningBackfill: + raise HTTPException( + status_code=409, + detail=f"There is already a running backfill for dag {backfill_request.dag_id}", + ) diff --git a/airflow/api_fastapi/core_api/routes/public/dag_run.py b/airflow/api_fastapi/core_api/routes/public/dag_run.py index 035d1b7fd7dc2..02780d6088e94 100644 --- a/airflow/api_fastapi/core_api/routes/public/dag_run.py +++ b/airflow/api_fastapi/core_api/routes/public/dag_run.py @@ -17,16 +17,25 @@ from __future__ import annotations -from fastapi import Depends, HTTPException +from fastapi import Depends, HTTPException, Query, Request from sqlalchemy import select from sqlalchemy.orm import Session from typing_extensions import Annotated +from airflow.api.common.mark_tasks import ( + set_dag_run_state_to_failed, + set_dag_run_state_to_queued, + set_dag_run_state_to_success, +) from airflow.api_fastapi.common.db.common import get_session from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.serializers.dag_run import DAGRunResponse -from airflow.models import DagRun +from airflow.api_fastapi.core_api.serializers.dag_run import ( + DAGRunPatchBody, + DAGRunPatchStates, + DAGRunResponse, +) +from airflow.models import DAG, DagRun dag_run_router = AirflowRouter(tags=["DagRun"], prefix="/dags/{dag_id}/dagRuns") @@ -57,3 +66,45 @@ async def delete_dag_run(dag_id: str, dag_run_id: str, session: Annotated[Sessio ) session.delete(dag_run) + + +@dag_run_router.patch("/{dag_run_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +async def patch_dag_run_state( + dag_id: str, + dag_run_id: str, + patch_body: DAGRunPatchBody, + session: Annotated[Session, Depends(get_session)], + request: Request, + update_mask: list[str] | None = Query(None), +) -> DAGRunResponse: + """Modify a DAG 
Run.""" + dag_run = session.scalar(select(DagRun).filter_by(dag_id=dag_id, run_id=dag_run_id)) + if dag_run is None: + raise HTTPException( + 404, f"The DagRun with dag_id: `{dag_id}` and run_id: `{dag_run_id}` was not found" + ) + + dag: DAG = request.app.state.dag_bag.get_dag(dag_id) + + if not dag: + raise HTTPException(404, f"Dag with id {dag_id} was not found") + + if update_mask: + if update_mask != ["state"]: + raise HTTPException(400, "Only `state` field can be updated through the REST API") + else: + update_mask = ["state"] + + for attr_name in update_mask: + if attr_name == "state": + state = getattr(patch_body, attr_name) + if state == DAGRunPatchStates.SUCCESS: + set_dag_run_state_to_success(dag=dag, run_id=dag_run.run_id, commit=True) + elif state == DAGRunPatchStates.QUEUED: + set_dag_run_state_to_queued(dag=dag, run_id=dag_run.run_id, commit=True) + else: + set_dag_run_state_to_failed(dag=dag, run_id=dag_run.run_id, commit=True) + + dag_run = session.get(DagRun, dag_run.id) + + return DAGRunResponse.model_validate(dag_run, from_attributes=True) diff --git a/airflow/api_fastapi/core_api/routes/public/dag_sources.py b/airflow/api_fastapi/core_api/routes/public/dag_sources.py new file mode 100644 index 0000000000000..2a660a7d0264e --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/dag_sources.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from fastapi import Depends, Header, HTTPException, Request, Response +from itsdangerous import BadSignature, URLSafeSerializer +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.dag_sources import DAGSourceResponse +from airflow.models.dagcode import DagCode + +dag_sources_router = AirflowRouter(tags=["DagSource"], prefix="/dagSources") + +mime_type_text = "text/plain" +mime_type_json = "application/json" +mime_type_any = "*/*" + + +@dag_sources_router.get( + "/{file_token}", + responses={ + **create_openapi_http_exception_doc([400, 401, 403, 404, 406]), + "200": { + "description": "Successful Response", + "content": { + mime_type_text: {"schema": {"type": "string", "example": "dag code"}}, + }, + }, + }, + response_model=DAGSourceResponse, +) +async def get_dag_source( + file_token: str, + session: Annotated[Session, Depends(get_session)], + request: Request, + accept: Annotated[str, Header()] = mime_type_any, +): + """Get source code using file token.""" + auth_s = URLSafeSerializer(request.app.state.secret_key) + + try: + path = auth_s.loads(file_token) + dag_source_model = DAGSourceResponse( + content=DagCode.code(path, session=session), + ) + except (BadSignature, FileNotFoundError): + raise HTTPException(404, "DAG source not found") + + if accept.startswith(mime_type_text): + return Response(dag_source_model.content, media_type=mime_type_text) + if accept.startswith(mime_type_json) or accept.startswith(mime_type_any): + return dag_source_model + raise HTTPException(406, "Content not available for Accept header") diff --git a/airflow/api_fastapi/core_api/routes/public/dag_warning.py b/airflow/api_fastapi/core_api/routes/public/dag_warning.py new file mode 100644 index 0000000000000..a388fae13be18 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/dag_warning.py @@ -0,0 +1,72 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
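The DAG-source endpoint above negotiates on the `Accept` header: `text/plain` returns the raw DAG file, `application/json` (or `*/*`) returns the serialized `DAGSourceResponse`, and anything else gets a 406. A client sketch with a hypothetical host; the file token is a value signed with the application's secret key and obtained from other API responses:

```python
import requests

BASE_URL = "http://localhost:8080/public"  # hypothetical deployment URL
FILE_TOKEN = "<signed-file-token>"          # issued elsewhere by the API; left opaque here

# Raw DAG file as plain text.
source_text = requests.get(
    f"{BASE_URL}/dagSources/{FILE_TOKEN}",
    headers={"Accept": "text/plain"},
).text

# The same content wrapped in the JSON response model.
source_json = requests.get(
    f"{BASE_URL}/dagSources/{FILE_TOKEN}",
    headers={"Accept": "application/json"},
).json()
print(source_json["content"])
```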
+ +from __future__ import annotations + +from fastapi import Depends +from sqlalchemy import select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryDagIdInDagWarningFilter, + QueryLimit, + QueryOffset, + QueryWarningTypeFilter, + SortParam, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.dag_warning import ( + DAGWarningCollectionResponse, + DAGWarningResponse, +) +from airflow.models import DagWarning + +dag_warning_router = AirflowRouter(tags=["DagWarning"]) + + +@dag_warning_router.get("/dagWarnings", responses=create_openapi_http_exception_doc([401, 403])) +async def list_dag_warnings( + dag_id: QueryDagIdInDagWarningFilter, + warning_type: QueryWarningTypeFilter, + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends(SortParam(["dag_id", "warning_type", "message", "timestamp"], DagWarning).dynamic_depends()), + ], + session: Annotated[Session, Depends(get_session)], +) -> DAGWarningCollectionResponse: + """Get a list of DAG warnings.""" + dag_warnings_select, total_entries = paginated_select( + select(DagWarning), [warning_type, dag_id], order_by, offset, limit, session + ) + + dag_warnings = session.scalars(dag_warnings_select).all() + + return DAGWarningCollectionResponse( + dag_warnings=[ + DAGWarningResponse.model_validate(dag_warning, from_attributes=True) + for dag_warning in dag_warnings + ], + total_entries=total_entries, + ) diff --git a/airflow/api_fastapi/core_api/routes/public/dags.py b/airflow/api_fastapi/core_api/routes/public/dags.py index 81293211fc92e..c7b753b5cdbd9 100644 --- a/airflow/api_fastapi/core_api/routes/public/dags.py +++ b/airflow/api_fastapi/core_api/routes/public/dags.py @@ -18,7 +18,7 @@ from __future__ import annotations from fastapi import Depends, HTTPException, Query, Request, Response -from sqlalchemy import update +from sqlalchemy import select, update from sqlalchemy.orm import Session from typing_extensions import Annotated @@ -32,6 +32,7 @@ QueryDagDisplayNamePatternSearch, QueryDagIdPatternSearch, QueryDagIdPatternSearchWithNone, + QueryDagTagPatternSearch, QueryLastDagRunStateFilter, QueryLimit, QueryOffset, @@ -48,9 +49,10 @@ DAGDetailsResponse, DAGPatchBody, DAGResponse, + DAGTagCollectionResponse, ) from airflow.exceptions import AirflowException, DagNotFound -from airflow.models import DAG, DagModel +from airflow.models import DAG, DagModel, DagTag dags_router = AirflowRouter(tags=["DAG"], prefix="/dags") @@ -95,6 +97,39 @@ async def get_dags( ) +@dags_router.get( + "/tags", + responses=create_openapi_http_exception_doc([401, 403]), +) +async def get_dag_tags( + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + ["name"], + DagTag, + ).dynamic_depends() + ), + ], + tag_name_pattern: QueryDagTagPatternSearch, + session: Annotated[Session, Depends(get_session)], +) -> DAGTagCollectionResponse: + """Get all DAG tags.""" + base_select = select(DagTag.name).group_by(DagTag.name) + dag_tags_select, total_entries = paginated_select( + base_select=base_select, + filters=[tag_name_pattern], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + dag_tags = session.execute(dag_tags_select).scalars().all() + 
return DAGTagCollectionResponse(tags=[dag_tag for dag_tag in dag_tags], total_entries=total_entries) + + @dags_router.get("/{dag_id}", responses=create_openapi_http_exception_doc([400, 401, 403, 404, 422])) async def get_dag( dag_id: str, session: Annotated[Session, Depends(get_session)], request: Request diff --git a/airflow/api_fastapi/core_api/routes/public/event_logs.py b/airflow/api_fastapi/core_api/routes/public/event_logs.py new file mode 100644 index 0000000000000..75f12cbefb03b --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/event_logs.py @@ -0,0 +1,51 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from fastapi import Depends, HTTPException +from sqlalchemy import select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.event_logs import ( + EventLogResponse, +) +from airflow.models import Log + +event_logs_router = AirflowRouter(tags=["Event Log"], prefix="/eventLogs") + + +@event_logs_router.get( + "/{event_log_id}", + responses=create_openapi_http_exception_doc([401, 403, 404]), +) +async def get_event_log( + event_log_id: int, + session: Annotated[Session, Depends(get_session)], +) -> EventLogResponse: + event_log = session.scalar(select(Log).where(Log.id == event_log_id)) + if event_log is None: + raise HTTPException(404, f"The Event Log with id: `{event_log_id}` not found") + return EventLogResponse.model_validate( + event_log, + from_attributes=True, + ) diff --git a/airflow/api_fastapi/core_api/routes/public/plugins.py b/airflow/api_fastapi/core_api/routes/public/plugins.py new file mode 100644 index 0000000000000..c264e748373cf --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/plugins.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.serializers.plugins import PluginCollectionResponse, PluginResponse +from airflow.plugins_manager import get_plugin_info + +plugins_router = AirflowRouter(tags=["Plugin"], prefix="/plugins") + + +@plugins_router.get("/") +async def get_plugins( + limit: QueryLimit, + offset: QueryOffset, +) -> PluginCollectionResponse: + plugins_info = sorted(get_plugin_info(), key=lambda x: x["name"]) + return PluginCollectionResponse( + plugins=[ + PluginResponse.model_validate(plugin_info) + for plugin_info in plugins_info[offset.value :][: limit.value] + ], + total_entries=len(plugins_info), + ) diff --git a/airflow/api_fastapi/core_api/routes/public/pools.py b/airflow/api_fastapi/core_api/routes/public/pools.py new file mode 100644 index 0000000000000..5690196e850a5 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/pools.py @@ -0,0 +1,152 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
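Unlike the database-backed collections, the plugin listing above paginates in Python: plugin metadata comes from the plugins manager, so the handler sorts it by name and slices it with the offset/limit values while still reporting the full count in `total_entries`. Client usage looks the same as elsewhere; a sketch assuming the shared `limit`/`offset` query parameters and a hypothetical host:

```python
import requests

BASE_URL = "http://localhost:8080/public"  # hypothetical deployment URL

page = requests.get(f"{BASE_URL}/plugins/", params={"limit": 10, "offset": 0}).json()
print("total plugins:", page["total_entries"])
for plugin in page["plugins"]:
    print(plugin["name"], plugin["source"])
```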
+from __future__ import annotations + +from fastapi import Depends, HTTPException, Query +from fastapi.exceptions import RequestValidationError +from pydantic import ValidationError +from sqlalchemy import delete, select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.pools import ( + BasePool, + PoolCollectionResponse, + PoolPatchBody, + PoolPostBody, + PoolResponse, +) +from airflow.models.pool import Pool + +pools_router = AirflowRouter(tags=["Pool"], prefix="/pools") + + +@pools_router.delete( + "/{pool_name}", + status_code=204, + responses=create_openapi_http_exception_doc([400, 401, 403, 404]), +) +async def delete_pool( + pool_name: str, + session: Annotated[Session, Depends(get_session)], +): + """Delete a pool entry.""" + if pool_name == "default_pool": + raise HTTPException(400, "Default Pool can't be deleted") + + affected_count = session.execute(delete(Pool).where(Pool.pool == pool_name)).rowcount + + if affected_count == 0: + raise HTTPException(404, f"The Pool with name: `{pool_name}` was not found") + + +@pools_router.get( + "/{pool_name}", + responses=create_openapi_http_exception_doc([401, 403, 404]), +) +async def get_pool( + pool_name: str, + session: Annotated[Session, Depends(get_session)], +) -> PoolResponse: + """Get a pool.""" + pool = session.scalar(select(Pool).where(Pool.pool == pool_name)) + if pool is None: + raise HTTPException(404, f"The Pool with name: `{pool_name}` was not found") + + return PoolResponse.model_validate(pool, from_attributes=True) + + +@pools_router.get( + "/", + responses=create_openapi_http_exception_doc([401, 403, 404]), +) +async def get_pools( + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends(SortParam(["id", "name"], Pool).dynamic_depends()), + ], + session: Annotated[Session, Depends(get_session)], +) -> PoolCollectionResponse: + """Get all pools entries.""" + pools_select, total_entries = paginated_select( + select(Pool), + [], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + + pools = session.scalars(pools_select).all() + + return PoolCollectionResponse( + pools=[PoolResponse.model_validate(pool, from_attributes=True) for pool in pools], + total_entries=total_entries, + ) + + +@pools_router.patch("/{pool_name}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) +async def patch_pool( + pool_name: str, + patch_body: PoolPatchBody, + session: Annotated[Session, Depends(get_session)], + update_mask: list[str] | None = Query(None), +) -> PoolResponse: + """Update a Pool.""" + # Only slots and include_deferred can be modified in 'default_pool' + if pool_name == Pool.DEFAULT_POOL_NAME: + if update_mask and all(mask.strip() in {"slots", "include_deferred"} for mask in update_mask): + pass + else: + raise HTTPException(400, "Only slots and included_deferred can be modified on Default Pool") + + pool = session.scalar(select(Pool).where(Pool.pool == pool_name).limit(1)) + if not pool: + raise HTTPException(404, detail=f"The Pool with name: `{pool_name}` was not found") + + if update_mask: + data = patch_body.model_dump(include=set(update_mask), by_alias=True) + 
else: + data = patch_body.model_dump(by_alias=True) + try: + BasePool.model_validate(data) + except ValidationError as e: + raise RequestValidationError(errors=e.errors()) + + for key, value in data.items(): + setattr(pool, key, value) + + return PoolResponse.model_validate(pool, from_attributes=True) + + +@pools_router.post("/", status_code=201, responses=create_openapi_http_exception_doc([401, 403])) +async def post_pool( + post_body: PoolPostBody, + session: Annotated[Session, Depends(get_session)], +) -> PoolResponse: + """Create a Pool.""" + pool = Pool(**post_body.model_dump()) + + session.add(pool) + + return PoolResponse.model_validate(pool, from_attributes=True) diff --git a/airflow/api_fastapi/core_api/routes/public/providers.py b/airflow/api_fastapi/core_api/routes/public/providers.py new file mode 100644 index 0000000000000..6c01578dd5f69 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/providers.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import re2 + +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.serializers.providers import ProviderCollectionResponse, ProviderResponse +from airflow.providers_manager import ProviderInfo, ProvidersManager + +providers_router = AirflowRouter(tags=["Provider"], prefix="/providers") + + +def _remove_rst_syntax(value: str) -> str: + return re2.sub("[`_<>]", "", value.strip(" \n.")) + + +def _provider_mapper(provider: ProviderInfo) -> ProviderResponse: + return ProviderResponse( + package_name=provider.data["package-name"], + description=_remove_rst_syntax(provider.data["description"]), + version=provider.version, + ) + + +@providers_router.get("/") +async def get_providers( + limit: QueryLimit, + offset: QueryOffset, +) -> ProviderCollectionResponse: + """Get providers.""" + providers = sorted( + [_provider_mapper(d) for d in ProvidersManager().providers.values()], key=lambda x: x.package_name + ) + total_entries = len(providers) + + if limit.value is not None and offset.value is not None: + providers = providers[offset.value : offset.value + limit.value] + return ProviderCollectionResponse(providers=providers, total_entries=total_entries) diff --git a/airflow/api_fastapi/core_api/routes/public/task_instances.py b/airflow/api_fastapi/core_api/routes/public/task_instances.py new file mode 100644 index 0000000000000..c9458e843afee --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/task_instances.py @@ -0,0 +1,85 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from fastapi import Depends, HTTPException +from sqlalchemy.orm import Session, joinedload +from sqlalchemy.sql import select +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc +from airflow.api_fastapi.core_api.serializers.task_instances import TaskInstanceResponse +from airflow.models.taskinstance import TaskInstance as TI + +task_instances_router = AirflowRouter( + tags=["Task Instance"], prefix="/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances" +) + + +@task_instances_router.get("/{task_id}", responses=create_openapi_http_exception_doc([401, 403, 404])) +async def get_task_instance( + dag_id: str, dag_run_id: str, task_id: str, session: Annotated[Session, Depends(get_session)] +) -> TaskInstanceResponse: + """Get task instance.""" + query = ( + select(TI) + .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id) + .join(TI.dag_run) + .options(joinedload(TI.rendered_task_instance_fields)) + ) + task_instance = session.scalar(query) + + if task_instance is None: + raise HTTPException( + 404, + f"The Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}` and task_id: `{task_id}` was not found", + ) + if task_instance.map_index != -1: + raise HTTPException(404, "Task instance is mapped, add the map_index value to the URL") + + return TaskInstanceResponse.model_validate(task_instance, from_attributes=True) + + +@task_instances_router.get( + "/{task_id}/{map_index}", responses=create_openapi_http_exception_doc([401, 403, 404]) +) +async def get_mapped_task_instance( + dag_id: str, + dag_run_id: str, + task_id: str, + map_index: int, + session: Annotated[Session, Depends(get_session)], +) -> TaskInstanceResponse: + """Get task instance.""" + query = ( + select(TI) + .where(TI.dag_id == dag_id, TI.run_id == dag_run_id, TI.task_id == task_id, TI.map_index == map_index) + .join(TI.dag_run) + .options(joinedload(TI.rendered_task_instance_fields)) + ) + task_instance = session.scalar(query) + + if task_instance is None: + raise HTTPException( + 404, + f"The Mapped Task Instance with dag_id: `{dag_id}`, run_id: `{dag_run_id}`, task_id: `{task_id}`, and map_index: `{map_index}` was not found", + ) + + return TaskInstanceResponse.model_validate(task_instance, from_attributes=True) diff --git a/airflow/api_fastapi/core_api/routes/public/variables.py b/airflow/api_fastapi/core_api/routes/public/variables.py index 3a46c519c9234..6b834a6de7581 100644 --- a/airflow/api_fastapi/core_api/routes/public/variables.py +++ b/airflow/api_fastapi/core_api/routes/public/variables.py @@ -21,10 +21,15 @@ from sqlalchemy.orm import Session from typing_extensions 
import Annotated -from airflow.api_fastapi.common.db.common import get_session +from airflow.api_fastapi.common.db.common import get_session, paginated_select +from airflow.api_fastapi.common.parameters import QueryLimit, QueryOffset, SortParam from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.openapi.exceptions import create_openapi_http_exception_doc -from airflow.api_fastapi.core_api.serializers.variables import VariableBody, VariableResponse +from airflow.api_fastapi.core_api.serializers.variables import ( + VariableBody, + VariableCollectionResponse, + VariableResponse, +) from airflow.models.variable import Variable variables_router = AirflowRouter(tags=["Variable"], prefix="/variables") @@ -58,6 +63,42 @@ async def get_variable( return VariableResponse.model_validate(variable, from_attributes=True) +@variables_router.get( + "/", + responses=create_openapi_http_exception_doc([401, 403]), +) +async def get_variables( + limit: QueryLimit, + offset: QueryOffset, + order_by: Annotated[ + SortParam, + Depends( + SortParam( + ["key", "id"], + Variable, + ).dynamic_depends() + ), + ], + session: Annotated[Session, Depends(get_session)], +) -> VariableCollectionResponse: + """Get all Variables entries.""" + variable_select, total_entries = paginated_select( + select(Variable), + [], + order_by=order_by, + offset=offset, + limit=limit, + session=session, + ) + + variables = session.scalars(variable_select).all() + + return VariableCollectionResponse( + variables=[VariableResponse.model_validate(variable, from_attributes=True) for variable in variables], + total_entries=total_entries, + ) + + @variables_router.patch("/{variable_key}", responses=create_openapi_http_exception_doc([400, 401, 403, 404])) async def patch_variable( variable_key: str, @@ -78,7 +119,6 @@ async def patch_variable( data = patch_body.model_dump(exclude=non_update_fields) for key, val in data.items(): setattr(variable, key, val) - session.add(variable) return variable diff --git a/airflow/api_fastapi/core_api/routes/public/version.py b/airflow/api_fastapi/core_api/routes/public/version.py new file mode 100644 index 0000000000000..218e0b90702dd --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/public/version.py @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
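The new variable collection endpoint above follows the same pattern as the other list endpoints: `order_by` accepts `key` or `id`, and `limit`/`offset` page through the results. A client sketch against a hypothetical deployment:

```python
import requests

BASE_URL = "http://localhost:8080/public"  # hypothetical deployment URL

page = requests.get(
    f"{BASE_URL}/variables/",
    params={"order_by": "key", "limit": 50, "offset": 0},
).json()
for variable in page["variables"]:
    print(variable["key"])
print("total:", page["total_entries"])
```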
+ +from __future__ import annotations + +import airflow +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.serializers.version import VersionInfo +from airflow.utils.platform import get_airflow_git_version + +version_router = AirflowRouter(tags=["Version"], prefix="/version") + + +@version_router.get("/") +async def get_version() -> VersionInfo: + """Get version information.""" + airflow_version = airflow.__version__ + git_version = get_airflow_git_version() + version_info = VersionInfo(version=airflow_version, git_version=git_version) + return VersionInfo.model_validate(version_info) diff --git a/airflow/api_fastapi/core_api/routes/ui/__init__.py b/airflow/api_fastapi/core_api/routes/ui/__init__.py index 9cd16fcdd16b3..b7ebf9c5c46fc 100644 --- a/airflow/api_fastapi/core_api/routes/ui/__init__.py +++ b/airflow/api_fastapi/core_api/routes/ui/__init__.py @@ -18,9 +18,11 @@ from airflow.api_fastapi.common.router import AirflowRouter from airflow.api_fastapi.core_api.routes.ui.assets import assets_router +from airflow.api_fastapi.core_api.routes.ui.dags import dags_router from airflow.api_fastapi.core_api.routes.ui.dashboard import dashboard_router ui_router = AirflowRouter(prefix="/ui") ui_router.include_router(assets_router) ui_router.include_router(dashboard_router) +ui_router.include_router(dags_router) diff --git a/airflow/api_fastapi/core_api/routes/ui/assets.py b/airflow/api_fastapi/core_api/routes/ui/assets.py index b8a17c7398424..6786bc30ae680 100644 --- a/airflow/api_fastapi/core_api/routes/ui/assets.py +++ b/airflow/api_fastapi/core_api/routes/ui/assets.py @@ -56,11 +56,11 @@ async def next_run_assets( AssetModel.uri, func.max(AssetEvent.timestamp).label("lastUpdate"), ) - .join(DagScheduleAssetReference, DagScheduleAssetReference.dataset_id == AssetModel.id) + .join(DagScheduleAssetReference, DagScheduleAssetReference.asset_id == AssetModel.id) .join( AssetDagRunQueue, and_( - AssetDagRunQueue.dataset_id == AssetModel.id, + AssetDagRunQueue.asset_id == AssetModel.id, AssetDagRunQueue.target_dag_id == DagScheduleAssetReference.dag_id, ), isouter=True, @@ -68,7 +68,7 @@ async def next_run_assets( .join( AssetEvent, and_( - AssetEvent.dataset_id == AssetModel.id, + AssetEvent.asset_id == AssetModel.id, ( AssetEvent.timestamp >= latest_run.execution_date if latest_run and latest_run.execution_date @@ -82,5 +82,5 @@ async def next_run_assets( .order_by(AssetModel.uri) ) ] - data = {"dataset_expression": dag_model.dataset_expression, "events": events} + data = {"asset_expression": dag_model.asset_expression, "events": events} return data diff --git a/airflow/api_fastapi/core_api/routes/ui/dags.py b/airflow/api_fastapi/core_api/routes/ui/dags.py new file mode 100644 index 0000000000000..665373734bb90 --- /dev/null +++ b/airflow/api_fastapi/core_api/routes/ui/dags.py @@ -0,0 +1,133 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from fastapi import Depends +from sqlalchemy import and_, func, select +from sqlalchemy.orm import Session +from typing_extensions import Annotated + +from airflow.api_fastapi.common.db.common import ( + get_session, + paginated_select, +) +from airflow.api_fastapi.common.parameters import ( + QueryDagDisplayNamePatternSearch, + QueryDagIdPatternSearch, + QueryLastDagRunStateFilter, + QueryLimit, + QueryOffset, + QueryOnlyActiveFilter, + QueryOwnersFilter, + QueryPausedFilter, + QueryTagsFilter, +) +from airflow.api_fastapi.common.router import AirflowRouter +from airflow.api_fastapi.core_api.serializers.dag_run import DAGRunResponse +from airflow.api_fastapi.core_api.serializers.dags import DAGResponse +from airflow.api_fastapi.core_api.serializers.ui.dags import ( + DAGWithLatestDagRunsCollectionResponse, + DAGWithLatestDagRunsResponse, +) +from airflow.models import DagModel, DagRun + +dags_router = AirflowRouter(prefix="/dags", tags=["Dags"]) + + +@dags_router.get("/recent_dag_runs", include_in_schema=False, response_model_exclude_none=True) +async def recent_dag_runs( + limit: QueryLimit, + offset: QueryOffset, + tags: QueryTagsFilter, + owners: QueryOwnersFilter, + dag_id_pattern: QueryDagIdPatternSearch, + dag_display_name_pattern: QueryDagDisplayNamePatternSearch, + only_active: QueryOnlyActiveFilter, + paused: QueryPausedFilter, + last_dag_run_state: QueryLastDagRunStateFilter, + session: Annotated[Session, Depends(get_session)], + dag_runs_limit: int = 10, +) -> DAGWithLatestDagRunsCollectionResponse: + """Get recent DAG runs.""" + recent_runs_subquery = ( + select( + DagRun.dag_id, + DagRun.execution_date, + func.rank() + .over( + partition_by=DagRun.dag_id, + order_by=DagRun.execution_date.desc(), + ) + .label("rank"), + ) + .order_by(DagRun.execution_date.desc()) + .subquery() + ) + dags_with_recent_dag_runs_select = ( + select( + DagRun, + DagModel, + recent_runs_subquery.c.execution_date, + ) + .join(DagModel, DagModel.dag_id == recent_runs_subquery.c.dag_id) + .join( + DagRun, + and_( + DagRun.dag_id == DagModel.dag_id, + DagRun.execution_date == recent_runs_subquery.c.execution_date, + ), + ) + .where(recent_runs_subquery.c.rank <= dag_runs_limit) + .group_by( + DagModel.dag_id, + recent_runs_subquery.c.execution_date, + DagRun.execution_date, + DagRun.id, + ) + .order_by(recent_runs_subquery.c.execution_date.desc()) + ) + dags_with_recent_dag_runs_select_filter, _ = paginated_select( + dags_with_recent_dag_runs_select, + [only_active, paused, dag_id_pattern, dag_display_name_pattern, tags, owners, last_dag_run_state], + None, + offset, + limit, + ) + dags_with_recent_dag_runs = session.execute(dags_with_recent_dag_runs_select_filter) + # aggregate rows by dag_id + dag_runs_by_dag_id: dict[str, DAGWithLatestDagRunsResponse] = {} + + for row in dags_with_recent_dag_runs: + dag_run, dag, *_ = row + dag_id = dag.dag_id + dag_run_response = DAGRunResponse.model_validate(dag_run, from_attributes=True) + if dag_id not in dag_runs_by_dag_id: + dag_response = DAGResponse.model_validate(dag, from_attributes=True) + 
dag_runs_by_dag_id[dag_id] = DAGWithLatestDagRunsResponse.model_validate( + { + **dag_response.dict(), + "latest_dag_runs": [dag_run_response], + } + ) + else: + dag_runs_by_dag_id[dag_id].latest_dag_runs.append(dag_run_response) + + return DAGWithLatestDagRunsCollectionResponse( + total_entries=len(dag_runs_by_dag_id), + dags=list(dag_runs_by_dag_id.values()), + ) diff --git a/airflow/api_fastapi/core_api/serializers/backfills.py b/airflow/api_fastapi/core_api/serializers/backfills.py new file mode 100644 index 0000000000000..69d6a98ccfd1a --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/backfills.py @@ -0,0 +1,59 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel + +from airflow.models.backfill import ReprocessBehavior + + +class BackfillPostBody(BaseModel): + """Object used for create backfill request.""" + + dag_id: str + from_date: datetime + to_date: datetime + run_backwards: bool = False + dag_run_conf: dict = {} + reprocess_behavior: ReprocessBehavior = ReprocessBehavior.NONE + max_active_runs: int = 10 + + +class BackfillResponse(BaseModel): + """Base serializer for Backfill.""" + + id: int + dag_id: str + from_date: datetime + to_date: datetime + dag_run_conf: dict + is_paused: bool + reprocess_behavior: ReprocessBehavior + max_active_runs: int + created_at: datetime + completed_at: datetime | None + updated_at: datetime + + +class BackfillCollectionResponse(BaseModel): + """Backfill Collection serializer for responses.""" + + backfills: list[BackfillResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/connections.py b/airflow/api_fastapi/core_api/serializers/connections.py index 1c801607299fe..1cc069cac0cb3 100644 --- a/airflow/api_fastapi/core_api/serializers/connections.py +++ b/airflow/api_fastapi/core_api/serializers/connections.py @@ -51,7 +51,7 @@ def redact_extra(cls, v: str | None) -> str | None: class ConnectionCollectionResponse(BaseModel): - """DAG Collection serializer for responses.""" + """Connection Collection serializer for responses.""" connections: list[ConnectionResponse] total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/dag_run.py b/airflow/api_fastapi/core_api/serializers/dag_run.py index 4622fac645c07..15576905611c3 100644 --- a/airflow/api_fastapi/core_api/serializers/dag_run.py +++ b/airflow/api_fastapi/core_api/serializers/dag_run.py @@ -18,6 +18,7 @@ from __future__ import annotations from datetime import datetime +from enum import Enum from pydantic import BaseModel, Field @@ -25,6 +26,20 @@ from airflow.utils.types import DagRunTriggeredByType, DagRunType +class DAGRunPatchStates(str, Enum): + """Enum for DAG Run states when updating a DAG Run.""" + 
+ QUEUED = DagRunState.QUEUED + SUCCESS = DagRunState.SUCCESS + FAILED = DagRunState.FAILED + + +class DAGRunPatchBody(BaseModel): + """DAG Run Serializer for PATCH requests.""" + + state: DAGRunPatchStates + + class DAGRunResponse(BaseModel): """DAG Run serializer for responses.""" diff --git a/airflow/api_fastapi/core_api/serializers/dag_sources.py b/airflow/api_fastapi/core_api/serializers/dag_sources.py new file mode 100644 index 0000000000000..8cae02be1a858 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/dag_sources.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from pydantic import BaseModel + + +class DAGSourceResponse(BaseModel): + """DAG Source serializer for responses.""" + + content: str | None diff --git a/airflow/api/auth/backend/default.py b/airflow/api_fastapi/core_api/serializers/dag_warning.py similarity index 61% rename from airflow/api/auth/backend/default.py rename to airflow/api_fastapi/core_api/serializers/dag_warning.py index afe2c88f35f0c..f38a3a8d093f7 100644 --- a/airflow/api/auth/backend/default.py +++ b/airflow/api_fastapi/core_api/serializers/dag_warning.py @@ -1,4 +1,3 @@ -# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -15,28 +14,27 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-"""Default authentication backend - everything is allowed.""" from __future__ import annotations -from functools import wraps -from typing import Any, Callable, TypeVar, cast - -CLIENT_AUTH: tuple[str, str] | Any | None = None +from datetime import datetime +from pydantic import BaseModel -def init_app(_): - """Initialize authentication backend.""" +from airflow.models.dagwarning import DagWarningType -T = TypeVar("T", bound=Callable) +class DAGWarningResponse(BaseModel): + """DAG Warning serializer for responses.""" + dag_id: str + warning_type: DagWarningType + message: str + timestamp: datetime -def requires_authentication(function: T): - """Decorate functions that require authentication.""" - @wraps(function) - def decorated(*args, **kwargs): - return function(*args, **kwargs) +class DAGWarningCollectionResponse(BaseModel): + """DAG warning collection serializer for responses.""" - return cast(T, decorated) + dag_warnings: list[DAGWarningResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/dags.py b/airflow/api_fastapi/core_api/serializers/dags.py index c9d48aac222eb..6e2c3933e176f 100644 --- a/airflow/api_fastapi/core_api/serializers/dags.py +++ b/airflow/api_fastapi/core_api/serializers/dags.py @@ -45,7 +45,6 @@ class DAGResponse(BaseModel): last_parsed_time: datetime | None last_pickled: datetime | None last_expired: datetime | None - scheduler_lock: datetime | None pickle_id: datetime | None default_view: str | None fileloc: str @@ -112,12 +111,11 @@ class DAGDetailsResponse(DAGResponse): catchup: bool dag_run_timeout: timedelta | None - dataset_expression: dict | None + asset_expression: dict | None doc_md: str | None start_date: datetime | None end_date: datetime | None is_paused_upon_creation: bool | None - orientation: str params: abc.MutableMapping | None render_template_as_native_obj: bool template_search_path: Iterable[str] | None @@ -156,3 +154,10 @@ def get_params(cls, params: abc.MutableMapping | None) -> dict | None: def concurrency(self) -> int: """Return max_active_tasks as concurrency.""" return self.max_active_tasks + + +class DAGTagCollectionResponse(BaseModel): + """DAG Tags Collection serializer for responses.""" + + tags: list[str] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/dashboard.py b/airflow/api_fastapi/core_api/serializers/dashboard.py index ee31a812945ec..66adc8ed3df92 100644 --- a/airflow/api_fastapi/core_api/serializers/dashboard.py +++ b/airflow/api_fastapi/core_api/serializers/dashboard.py @@ -25,7 +25,7 @@ class DAGRunTypes(BaseModel): backfill: int scheduled: int manual: int - dataset_triggered: int + asset_triggered: int class DAGRunStates(BaseModel): diff --git a/airflow/api_fastapi/core_api/serializers/event_logs.py b/airflow/api_fastapi/core_api/serializers/event_logs.py new file mode 100644 index 0000000000000..e295dc35061fb --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/event_logs.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, ConfigDict, Field + + +class EventLogResponse(BaseModel): + """Event Log Response.""" + + id: int = Field(alias="event_log_id") + dttm: datetime = Field(alias="when") + dag_id: str | None + task_id: str | None + run_id: str | None + map_index: int | None + try_number: int | None + event: str + execution_date: datetime | None = Field(alias="logical_date") + owner: str | None + extra: str | None + + model_config = ConfigDict(populate_by_name=True) diff --git a/airflow/api_fastapi/core_api/serializers/job.py b/airflow/api_fastapi/core_api/serializers/job.py new file mode 100644 index 0000000000000..e4d5ceb4b4e20 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/job.py @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, ConfigDict + + +class JobResponse(BaseModel): + """Job serializer for responses.""" + + model_config = ConfigDict(populate_by_name=True) + + id: int + dag_id: str | None + state: str | None + job_type: str | None + start_date: datetime | None + end_date: datetime | None + latest_heartbeat: datetime | None + executor_class: datetime | None + hostname: str | None + unixname: str | None diff --git a/airflow/api_fastapi/core_api/serializers/plugins.py b/airflow/api_fastapi/core_api/serializers/plugins.py new file mode 100644 index 0000000000000..e16b56a6aca06 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/plugins.py @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from typing import Any + +from pydantic import BaseModel, BeforeValidator, ConfigDict, field_validator +from typing_extensions import Annotated + +from airflow.plugins_manager import AirflowPluginSource + + +def coerce_to_string(data: Any) -> Any: + return str(data) + + +class FastAPIAppResponse(BaseModel): + """Serializer for Plugin FastAPI App responses.""" + + model_config = ConfigDict(extra="allow") + + app: str + url_prefix: str + name: str + + +class AppBuilderViewResponse(BaseModel): + """Serializer for AppBuilder View responses.""" + + model_config = ConfigDict(extra="allow") + + name: str | None = None + category: str | None = None + view: str | None = None + label: str | None = None + + +class AppBuilderMenuItemResponse(BaseModel): + """Serializer for AppBuilder Menu Item responses.""" + + model_config = ConfigDict(extra="allow") + + name: str + href: str | None = None + category: str | None = None + + +class PluginResponse(BaseModel): + """Plugin serializer.""" + + name: str + macros: list[str] + flask_blueprints: list[str] + fastapi_apps: list[FastAPIAppResponse] + appbuilder_views: list[AppBuilderViewResponse] + appbuilder_menu_items: list[AppBuilderMenuItemResponse] + global_operator_extra_links: list[str] + operator_extra_links: list[str] + source: Annotated[str, BeforeValidator(coerce_to_string)] + ti_deps: list[Annotated[str, BeforeValidator(coerce_to_string)]] + listeners: list[str] + timetables: list[str] + + @field_validator("source", mode="before") + @classmethod + def convert_source(cls, data: Any) -> Any: + if isinstance(data, AirflowPluginSource): + return str(data) + return data + + +class PluginCollectionResponse(BaseModel): + """Plugin Collection serializer.""" + + plugins: list[PluginResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/pools.py b/airflow/api_fastapi/core_api/serializers/pools.py new file mode 100644 index 0000000000000..ef3676a8afec7 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/pools.py @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from typing import Annotated, Callable + +from pydantic import BaseModel, BeforeValidator, ConfigDict, Field + + +def _call_function(function: Callable[[], int]) -> int: + """ + Call the given function. + + Used for the BeforeValidator to get the actual values from the bound method. 
+ """ + return function() + + +class BasePool(BaseModel): + """Base serializer for Pool.""" + + pool: str = Field(serialization_alias="name") + slots: int + description: str | None + include_deferred: bool + + +class PoolResponse(BasePool): + """Pool serializer for responses.""" + + occupied_slots: Annotated[int, BeforeValidator(_call_function)] + running_slots: Annotated[int, BeforeValidator(_call_function)] + queued_slots: Annotated[int, BeforeValidator(_call_function)] + scheduled_slots: Annotated[int, BeforeValidator(_call_function)] + open_slots: Annotated[int, BeforeValidator(_call_function)] + deferred_slots: Annotated[int, BeforeValidator(_call_function)] + + +class PoolCollectionResponse(BaseModel): + """Pool Collection serializer for responses.""" + + pools: list[PoolResponse] + total_entries: int + + +class PoolPatchBody(BaseModel): + """Pool serializer for patch bodies.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str | None = Field(default=None, alias="pool") + slots: int | None = None + description: str | None = None + include_deferred: bool | None = None + + +class PoolPostBody(BasePool): + """Pool serializer for post bodies.""" + + pool: str = Field(alias="name") + description: str | None = None + include_deferred: bool = False diff --git a/airflow/api_fastapi/core_api/serializers/providers.py b/airflow/api_fastapi/core_api/serializers/providers.py new file mode 100644 index 0000000000000..4e542f19f9f8e --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/providers.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from pydantic import BaseModel + + +class ProviderResponse(BaseModel): + """Provider serializer for responses.""" + + package_name: str + description: str + version: str + + +class ProviderCollectionResponse(BaseModel): + """Provider Collection serializer for responses.""" + + providers: list[ProviderResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/task_instances.py b/airflow/api_fastapi/core_api/serializers/task_instances.py new file mode 100644 index 0000000000000..b8a10e8fb8614 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/task_instances.py @@ -0,0 +1,71 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from datetime import datetime +from typing import Annotated + +from pydantic import AliasPath, BaseModel, BeforeValidator, ConfigDict, Field + +from airflow.api_fastapi.core_api.serializers.job import JobResponse +from airflow.api_fastapi.core_api.serializers.trigger import TriggerResponse +from airflow.utils.state import TaskInstanceState + + +class TaskInstanceResponse(BaseModel): + """TaskInstance serializer for responses.""" + + model_config = ConfigDict(populate_by_name=True) + + task_id: str + dag_id: str + run_id: str = Field(alias="dag_run_id") + map_index: int + execution_date: datetime = Field(alias="logical_date") + start_date: datetime | None + end_date: datetime | None + duration: float | None + state: TaskInstanceState | None + try_number: int + max_tries: int + task_display_name: str + hostname: str | None + unixname: str | None + pool: str + pool_slots: int + queue: str | None + priority_weight: int | None + operator: str | None + queued_dttm: datetime | None = Field(alias="queued_when") + pid: int | None + executor: str | None + executor_config: Annotated[str, BeforeValidator(str)] + note: str | None + rendered_map_index: str | None + rendered_fields: dict = Field( + validation_alias=AliasPath("rendered_task_instance_fields", "rendered_fields"), + default={}, + ) + trigger: TriggerResponse | None + queued_by_job: JobResponse | None = Field(alias="triggerer_job") + + +class TaskInstanceCollectionResponse(BaseModel): + """Task Instance Collection serializer for responses.""" + + task_instances: list[TaskInstanceResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/trigger.py b/airflow/api_fastapi/core_api/serializers/trigger.py new file mode 100644 index 0000000000000..624fa49ab03a7 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/trigger.py @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from __future__ import annotations + +from datetime import datetime + +from pydantic import BaseModel, BeforeValidator, ConfigDict +from typing_extensions import Annotated + + +class TriggerResponse(BaseModel): + """Trigger serializer for responses.""" + + model_config = ConfigDict(populate_by_name=True) + + id: int + classpath: str + kwargs: Annotated[str, BeforeValidator(str)] + created_date: datetime + triggerer_id: int | None diff --git a/providers/src/airflow/providers/google/datasets/__init__.py b/airflow/api_fastapi/core_api/serializers/ui/__init__.py similarity index 100% rename from providers/src/airflow/providers/google/datasets/__init__.py rename to airflow/api_fastapi/core_api/serializers/ui/__init__.py diff --git a/airflow/api_fastapi/core_api/serializers/ui/dags.py b/airflow/api_fastapi/core_api/serializers/ui/dags.py new file mode 100644 index 0000000000000..f985ce99a9725 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/ui/dags.py @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +from pydantic import BaseModel + +from airflow.api_fastapi.core_api.serializers.dag_run import DAGRunResponse +from airflow.api_fastapi.core_api.serializers.dags import DAGResponse + + +class DAGWithLatestDagRunsResponse(DAGResponse): + """DAG with latest dag runs response serializer.""" + + latest_dag_runs: list[DAGRunResponse] + + +class DAGWithLatestDagRunsCollectionResponse(BaseModel): + """DAG with latest dag runs collection response serializer.""" + + total_entries: int + dags: list[DAGWithLatestDagRunsResponse] diff --git a/airflow/api_fastapi/core_api/serializers/variables.py b/airflow/api_fastapi/core_api/serializers/variables.py index 1ecc87425a24f..b328972544fd0 100644 --- a/airflow/api_fastapi/core_api/serializers/variables.py +++ b/airflow/api_fastapi/core_api/serializers/variables.py @@ -58,3 +58,10 @@ class VariableBody(VariableBase): """Variable serializer for bodies.""" value: str | None + + +class VariableCollectionResponse(BaseModel): + """Variable Collection serializer for responses.""" + + variables: list[VariableResponse] + total_entries: int diff --git a/airflow/api_fastapi/core_api/serializers/version.py b/airflow/api_fastapi/core_api/serializers/version.py new file mode 100644 index 0000000000000..01c4c45376f70 --- /dev/null +++ b/airflow/api_fastapi/core_api/serializers/version.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +from pydantic import BaseModel + + +class VersionInfo(BaseModel): + """Version information serializer for responses.""" + + version: str + git_version: str | None diff --git a/airflow/assets/__init__.py b/airflow/assets/__init__.py index 4dbc35fb95397..58256929948a8 100644 --- a/airflow/assets/__init__.py +++ b/airflow/assets/__init__.py @@ -39,7 +39,7 @@ from airflow.configuration import conf -__all__ = ["Asset", "AssetAll", "AssetAny"] +__all__ = ["Asset", "AssetAll", "AssetAny", "Dataset"] log = logging.getLogger(__name__) @@ -169,7 +169,7 @@ def expand_alias_to_assets(alias: str | AssetAlias, *, session: Session = NEW_SE select(AssetAliasModel).where(AssetAliasModel.name == alias_name).limit(1) ) if asset_alias_obj: - return [asset.to_public() for asset in asset_alias_obj.datasets] + return [asset.to_public() for asset in asset_alias_obj.assets] return [] @@ -275,13 +275,14 @@ def _set_extra_default(extra: dict | None) -> dict: @attr.define(init=False, unsafe_hash=False) class Asset(os.PathLike, BaseAsset): - """A representation of data dependencies between workflows.""" + """A representation of data asset dependencies between workflows.""" name: str uri: str group: str extra: dict[str, Any] + asset_type: ClassVar[str] = "" __version__: ClassVar[int] = 1 @overload @@ -313,7 +314,7 @@ def __init__( fields = attr.fields_dict(Asset) self.name = _validate_non_empty_identifier(self, fields["name"], name) self.uri = _sanitize_uri(_validate_non_empty_identifier(self, fields["uri"], uri)) - self.group = _validate_identifier(self, fields["group"], group) + self.group = _validate_identifier(self, fields["group"], group) if group else self.asset_type self.extra = _set_extra_default(extra) def __fspath__(self) -> str: @@ -372,6 +373,18 @@ def iter_dag_dependencies(self, *, source: str, target: str) -> Iterator[DagDepe ) +class Dataset(Asset): + """A representation of dataset dependencies between workflows.""" + + asset_type: ClassVar[str] = "dataset" + + +class Model(Asset): + """A representation of model dependencies between workflows.""" + + asset_type: ClassVar[str] = "model" + + class _AssetBooleanCondition(BaseAsset): """Base class for asset boolean logic.""" diff --git a/airflow/assets/manager.py b/airflow/assets/manager.py index cd4d72e633a8e..a06c7c31786f5 100644 --- a/airflow/assets/manager.py +++ b/airflow/assets/manager.py @@ -138,7 +138,7 @@ def register_asset_change( cls._add_asset_alias_association({alias.name for alias in aliases}, asset_model, session=session) event_kwargs = { - "dataset_id": asset_model.id, + "asset_id": asset_model.id, "extra": extra, } if task_instance: @@ -167,7 +167,7 @@ def register_asset_change( ).unique() for asset_alias_model in asset_alias_models: - asset_alias_model.dataset_events.append(asset_event) + asset_alias_model.asset_events.append(asset_event) session.add(asset_alias_model) dags_to_queue_from_asset_alias |= { @@ -224,7 +224,7 @@ def _queue_dagruns(cls, asset_id: int, dags_to_queue: set[DagModel], session: Se @classmethod def _slow_path_queue_dagruns(cls, asset_id: int, dags_to_queue: set[DagModel], 
session: Session) -> None: def _queue_dagrun_if_needed(dag: DagModel) -> str | None: - item = AssetDagRunQueue(target_dag_id=dag.dag_id, dataset_id=asset_id) + item = AssetDagRunQueue(target_dag_id=dag.dag_id, asset_id=asset_id) # Don't error whole transaction when a single RunQueue item conflicts. # https://docs.sqlalchemy.org/en/14/orm/session_transaction.html#using-savepoint try: @@ -243,7 +243,7 @@ def _postgres_queue_dagruns(cls, asset_id: int, dags_to_queue: set[DagModel], se from sqlalchemy.dialects.postgresql import insert values = [{"target_dag_id": dag.dag_id} for dag in dags_to_queue] - stmt = insert(AssetDagRunQueue).values(dataset_id=asset_id).on_conflict_do_nothing() + stmt = insert(AssetDagRunQueue).values(asset_id=asset_id).on_conflict_do_nothing() session.execute(stmt, values) @classmethod diff --git a/airflow/assets/metadata.py b/airflow/assets/metadata.py index 4fd2902afc8bf..b7522226230f6 100644 --- a/airflow/assets/metadata.py +++ b/airflow/assets/metadata.py @@ -29,7 +29,7 @@ @attrs.define(init=False) class Metadata: - """Metadata to attach to a AssetEvent.""" + """Metadata to attach to an AssetEvent.""" uri: str extra: dict[str, Any] diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py index 5d1fe9ba8e51e..06ac2f7bd8172 100644 --- a/airflow/cli/cli_config.py +++ b/airflow/cli/cli_config.py @@ -304,7 +304,7 @@ def string_lower_type(val): # backfill -ARG_BACKFILL_DAG = Arg(flags=("--dag",), help="The dag to backfill.", required=True) +ARG_BACKFILL_DAG = Arg(flags=("--dag-id",), help="The dag to backfill.", required=True) ARG_BACKFILL_FROM_DATE = Arg( ("--from-date",), help="Earliest logical date to backfill.", type=parsedate, required=True ) @@ -325,6 +325,19 @@ def string_lower_type(val): type=positive_int(allow_zero=False), help="Max active runs for this backfill.", ) +ARG_BACKFILL_DRY_RUN = Arg( + ("--dry-run",), + help="Perform a dry run", + action="store_true", +) +ARG_BACKFILL_REPROCESS_BEHAVIOR = Arg( + ("--reprocess-behavior",), + help=( + "When a run exists for the logical date, controls whether new runs will be " + "created for the date. Default is none." 
+ ), + choices=("none", "completed", "failed"), +) # misc @@ -567,7 +580,6 @@ def string_lower_type(val): ("--ship-dag",), help="Pickles (serializes) the DAG and ships it to the worker", action="store_true" ) ARG_PICKLE = Arg(("-p", "--pickle"), help="Serialized pickle object of the entire dag (used internally)") -ARG_JOB_ID = Arg(("-j", "--job-id"), help=argparse.SUPPRESS) ARG_CFG_PATH = Arg(("--cfg-path",), help="Path to config file to use instead of airflow.cfg") ARG_MAP_INDEX = Arg(("--map-index",), type=int, default=-1, help="Mapped task index") ARG_READ_FROM_DB = Arg(("--read-from-db",), help="Read dag from DB instead of dag file", action="store_true") @@ -1030,6 +1042,8 @@ class GroupCommand(NamedTuple): ARG_DAG_RUN_CONF, ARG_RUN_BACKWARDS, ARG_MAX_ACTIVE_RUNS, + ARG_BACKFILL_REPROCESS_BEHAVIOR, + ARG_BACKFILL_DRY_RUN, ), ), ) @@ -1339,7 +1353,6 @@ class GroupCommand(NamedTuple): ARG_DEPENDS_ON_PAST, ARG_SHIP_DAG, ARG_PICKLE, - ARG_JOB_ID, ARG_INTERACTIVE, ARG_SHUT_DOWN_LOGGING, ARG_MAP_INDEX, diff --git a/airflow/cli/commands/backfill_command.py b/airflow/cli/commands/backfill_command.py index 8714ed5585004..63a8573ab7379 100644 --- a/airflow/cli/commands/backfill_command.py +++ b/airflow/cli/commands/backfill_command.py @@ -21,10 +21,31 @@ import signal from airflow import settings -from airflow.models.backfill import _create_backfill +from airflow.models.backfill import ReprocessBehavior, _create_backfill, _get_info_list +from airflow.models.serialized_dag import SerializedDagModel from airflow.utils import cli as cli_utils from airflow.utils.cli import sigint_handler from airflow.utils.providers_configuration_loader import providers_configuration_loaded +from airflow.utils.session import create_session + + +def _do_dry_run(*, params, dag_id, from_date, to_date, reverse): + print("Performing dry run of backfill.") + print("Printing params:") + for k, v in params.items(): + print(f" - {k} = {v}") + with create_session() as session: + serdag = session.get(SerializedDagModel, dag_id) + + info_list = _get_info_list( + dag=serdag.dag, + from_date=from_date, + to_date=to_date, + reverse=reverse, + ) + print("Logical dates to be attempted:") + for info in info_list: + print(f" - {info.logical_date}") @cli_utils.action_cli @@ -34,11 +55,34 @@ def create_backfill(args) -> None: logging.basicConfig(level=settings.LOGGING_LEVEL, format=settings.SIMPLE_LOG_FORMAT) signal.signal(signal.SIGTERM, sigint_handler) + if args.reprocess_behavior is not None: + reprocess_behavior = ReprocessBehavior(args.reprocess_behavior) + else: + reprocess_behavior = None + + if args.dry_run: + _do_dry_run( + params=dict( + dag_id=args.dag_id, + from_date=args.from_date, + to_date=args.to_date, + max_active_runs=args.max_active_runs, + reverse=args.run_backwards, + dag_run_conf=args.dag_run_conf, + reprocess_behavior=reprocess_behavior, + ), + dag_id=args.dag_id, + from_date=args.from_date, + to_date=args.to_date, + reverse=args.run_backwards, + ) + return _create_backfill( - dag_id=args.dag, + dag_id=args.dag_id, from_date=args.from_date, to_date=args.to_date, max_active_runs=args.max_active_runs, reverse=args.run_backwards, dag_run_conf=args.dag_run_conf, + reprocess_behavior=reprocess_behavior, ) diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 83d0430a717bd..92d1825dc627e 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -227,8 +227,6 @@ def _get_dagbag_dag_details(dag: DAG) -> dict: "last_parsed_time": None, 
"last_pickled": None, "last_expired": None, - "scheduler_lock": None, - "pickle_id": dag.pickle_id, "default_view": dag.default_view, "fileloc": dag.fileloc, "file_token": None, diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index 3291bc250b085..03d2737072f3f 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -52,7 +52,7 @@ from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_deps import SCHEDULER_QUEUED_DEPS from airflow.typing_compat import Literal -from airflow.utils import cli as cli_utils +from airflow.utils import cli as cli_utils, timezone from airflow.utils.cli import ( get_dag, get_dag_by_file_location, @@ -61,7 +61,6 @@ should_ignore_depends_on_past, suppress_logs_and_warning, ) -from airflow.utils.dates import timezone from airflow.utils.log.file_task_handler import _set_task_deferred_context_var from airflow.utils.log.logging_mixin import StreamLogWriter from airflow.utils.log.secrets_masker import RedactedIO @@ -342,7 +341,6 @@ def _run_raw_task(args, ti: TaskInstance) -> None | TaskReturnCode: """Run the main task handling code.""" return ti._run_raw_task( mark_success=args.mark_success, - job_id=args.job_id, pool=args.pool, ) diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml index 0be77a3b6829a..cfadcb16fd28a 100644 --- a/airflow/config_templates/config.yml +++ b/airflow/config_templates/config.yml @@ -218,15 +218,6 @@ core: type: string example: ~ default: "50" - task_runner: - description: | - The class to use for running task instances in a subprocess. - Choices include StandardTaskRunner, CgroupTaskRunner or the full import path to the class - when using a custom task runner. - version_added: ~ - type: string - example: ~ - default: "StandardTaskRunner" default_impersonation: description: | If set, tasks without a ``run_as_user`` argument will be run with this user @@ -475,7 +466,7 @@ core: version_added: 3.0.0 type: string default: ~ - example: 'airflow.datasets.manager.AssetManager' + example: 'airflow.assets.manager.AssetManager' asset_manager_kwargs: description: Kwargs to supply to asset manager. version_added: 3.0.0 @@ -1032,18 +1023,6 @@ logging: type: boolean example: ~ default: "False" - enable_task_context_logger: - description: | - If enabled, Airflow may ship messages to task logs from outside the task run context, e.g. from - the scheduler, executor, or callback execution context. This can help in circumstances such as - when there's something blocking the execution of the task and ordinarily there may be no task - logs at all. - This is set to ``True`` by default. If you encounter issues with this feature - (e.g. scheduler performance issues) it can be disabled. - version_added: 2.8.0 - type: boolean - example: ~ - default: "True" color_log_error_keywords: description: | A comma separated list of keywords related to errors whose presence should display the line in red @@ -1377,12 +1356,11 @@ api: description: | Comma separated list of auth backends to authenticate users of the API. See `Security: API - `__ for possible values. - ("airflow.api.auth.backend.default" allows all requests for historic reasons) + `__ for possible values version_added: 2.3.0 type: string example: ~ - default: "airflow.api.auth.backend.session" + default: "airflow.providers.fab.auth_manager.api.auth.backend.session" maximum_page_limit: description: | Used to set the maximum page limit for API requests. 
If limit passed as param @@ -2279,7 +2257,7 @@ scheduler: parsing_cleanup_interval: description: | How often (in seconds) to check for stale DAGs (DAGs which are no longer present in - the expected files) which should be deactivated, as well as datasets that are no longer + the expected files) which should be deactivated, as well as assets that are no longer referenced and should be marked as orphaned. version_added: 2.5.0 type: integer diff --git a/airflow/config_templates/unit_tests.cfg b/airflow/config_templates/unit_tests.cfg index 27134c7218215..b29c642afe77f 100644 --- a/airflow/config_templates/unit_tests.cfg +++ b/airflow/config_templates/unit_tests.cfg @@ -71,7 +71,7 @@ celery_logging_level = INFO smtp_mail_from = airflow@example.com [api] -auth_backends = airflow.api.auth.backend.default +auth_backends = airflow.providers.fab.auth_manager.api.auth.backend.session [hive] # Hive uses the configuration below to run the tests diff --git a/airflow/configuration.py b/airflow/configuration.py index 81dc18365392e..82718325865f1 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -670,11 +670,11 @@ def _upgrade_auth_backends(self): This is required by the UI for ajax queries. """ old_value = self.get("api", "auth_backends", fallback="") - if old_value in ("airflow.api.auth.backend.default", ""): - # handled by deprecated_values - pass - elif old_value.find("airflow.api.auth.backend.session") == -1: - new_value = old_value + ",airflow.api.auth.backend.session" + if ( + old_value.find("airflow.api.auth.backend.session") == -1 + and old_value.find("airflow.providers.fab.auth_manager.api.auth.backend.session") == -1 + ): + new_value = old_value + ",airflow.providers.fab.auth_manager.api.auth.backend.session" self._update_env_var(section="api", name="auth_backends", new_value=new_value) self.upgraded_values[("api", "auth_backends")] = old_value @@ -772,6 +772,21 @@ def _create_future_warning(name: str, section: str, current_value: Any, new_valu stacklevel=3, ) + def mask_secrets(self): + from airflow.utils.log.secrets_masker import mask_secret + + for section, key in self.sensitive_config_values: + try: + value = self.get(section, key, suppress_warnings=True) + except AirflowConfigException: + log.debug( + "Could not retrieve value from section %s, for key %s. Skipping redaction of this conf.", + section, + key, + ) + continue + mask_secret(value) + def _env_var_name(self, section: str, key: str) -> str: return f"{ENV_VAR_PREFIX}{section.replace('.', '_').upper()}__{key.upper()}" @@ -2028,114 +2043,6 @@ def make_group_other_inaccessible(file_path: str): ) -def get(*args, **kwargs) -> ConfigType | None: - """Historical get.""" - warnings.warn( - "Accessing configuration method 'get' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.get'", - DeprecationWarning, - stacklevel=2, - ) - return conf.get(*args, **kwargs) - - -def getboolean(*args, **kwargs) -> bool: - """Historical getboolean.""" - warnings.warn( - "Accessing configuration method 'getboolean' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.getboolean'", - DeprecationWarning, - stacklevel=2, - ) - return conf.getboolean(*args, **kwargs) - - -def getfloat(*args, **kwargs) -> float: - """Historical getfloat.""" - warnings.warn( - "Accessing configuration method 'getfloat' directly from the configuration module is " - "deprecated. 
Please access the configuration from the 'configuration.conf' object via " - "'conf.getfloat'", - DeprecationWarning, - stacklevel=2, - ) - return conf.getfloat(*args, **kwargs) - - -def getint(*args, **kwargs) -> int: - """Historical getint.""" - warnings.warn( - "Accessing configuration method 'getint' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.getint'", - DeprecationWarning, - stacklevel=2, - ) - return conf.getint(*args, **kwargs) - - -def getsection(*args, **kwargs) -> ConfigOptionsDictType | None: - """Historical getsection.""" - warnings.warn( - "Accessing configuration method 'getsection' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.getsection'", - DeprecationWarning, - stacklevel=2, - ) - return conf.getsection(*args, **kwargs) - - -def has_option(*args, **kwargs) -> bool: - """Historical has_option.""" - warnings.warn( - "Accessing configuration method 'has_option' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.has_option'", - DeprecationWarning, - stacklevel=2, - ) - return conf.has_option(*args, **kwargs) - - -def remove_option(*args, **kwargs) -> bool: - """Historical remove_option.""" - warnings.warn( - "Accessing configuration method 'remove_option' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.remove_option'", - DeprecationWarning, - stacklevel=2, - ) - return conf.remove_option(*args, **kwargs) - - -def as_dict(*args, **kwargs) -> ConfigSourcesType: - """Historical as_dict.""" - warnings.warn( - "Accessing configuration method 'as_dict' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.as_dict'", - DeprecationWarning, - stacklevel=2, - ) - return conf.as_dict(*args, **kwargs) - - -def set(*args, **kwargs) -> None: - """Historical set.""" - warnings.warn( - "Accessing configuration method 'set' directly from the configuration module is " - "deprecated. Please access the configuration from the 'configuration.conf' object via " - "'conf.set'", - DeprecationWarning, - stacklevel=2, - ) - conf.set(*args, **kwargs) - - def ensure_secrets_loaded() -> list[BaseSecretsBackend]: """ Ensure that all secrets backends are loaded. 
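The hunk above removes the long-deprecated module-level configuration helpers (get, getboolean, getfloat, getint, getsection, has_option, remove_option, as_dict, set). As their own deprecation warnings indicated, the supported pattern is to read settings through the shared conf object. A minimal sketch of that pattern, with the option names chosen only as familiar examples and not anything introduced by this change:

from airflow.configuration import conf

# Read values through the shared AirflowConfigParser instance instead of the
# removed module-level helpers such as configuration.get(...) or configuration.getboolean(...).
dags_folder = conf.get("core", "dags_folder")
parallelism = conf.getint("core", "parallelism")
unit_test_mode = conf.getboolean("core", "unit_test_mode", fallback=False)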
@@ -2192,39 +2099,6 @@ def initialize_secrets_backends() -> list[BaseSecretsBackend]: return backend_list -@functools.lru_cache(maxsize=None) -def _DEFAULT_CONFIG() -> str: - path = _default_config_file_path("default_airflow.cfg") - with open(path) as fh: - return fh.read() - - -@functools.lru_cache(maxsize=None) -def _TEST_CONFIG() -> str: - path = _default_config_file_path("default_test.cfg") - with open(path) as fh: - return fh.read() - - -_deprecated = { - "DEFAULT_CONFIG": _DEFAULT_CONFIG, - "TEST_CONFIG": _TEST_CONFIG, - "TEST_CONFIG_FILE_PATH": functools.partial(_default_config_file_path, "default_test.cfg"), - "DEFAULT_CONFIG_FILE_PATH": functools.partial(_default_config_file_path, "default_airflow.cfg"), -} - - -def __getattr__(name): - if name in _deprecated: - warnings.warn( - f"{__name__}.{name} is deprecated and will be removed in future", - DeprecationWarning, - stacklevel=2, - ) - return _deprecated[name]() - raise AttributeError(f"module {__name__} has no attribute {name}") - - def initialize_auth_manager() -> BaseAuthManager: """ Initialize auth manager. diff --git a/airflow/dag_processing/collection.py b/airflow/dag_processing/collection.py index 068a3727d04c0..f608900ee76e1 100644 --- a/airflow/dag_processing/collection.py +++ b/airflow/dag_processing/collection.py @@ -37,7 +37,6 @@ from airflow.assets import Asset, AssetAlias from airflow.assets.manager import asset_manager from airflow.models.asset import ( - AssetActive, AssetAliasModel, AssetModel, DagScheduleAssetAliasReference, @@ -67,9 +66,9 @@ def _find_orm_dags(dag_ids: Iterable[str], *, session: Session) -> dict[str, Dag select(DagModel) .options(joinedload(DagModel.tags, innerjoin=False)) .where(DagModel.dag_id.in_(dag_ids)) - .options(joinedload(DagModel.schedule_dataset_references)) - .options(joinedload(DagModel.schedule_dataset_alias_references)) - .options(joinedload(DagModel.task_outlet_dataset_references)) + .options(joinedload(DagModel.schedule_asset_references)) + .options(joinedload(DagModel.schedule_asset_alias_references)) + .options(joinedload(DagModel.task_outlet_asset_references)) ) stmt = with_row_locks(stmt, of=DagModel, session=session) return {dm.dag_id: dm for dm in session.scalars(stmt).unique()} @@ -212,7 +211,10 @@ def update_dags( dm.has_import_errors = False dm.last_parsed_time = utcnow() dm.default_view = dag.default_view - dm._dag_display_property_value = dag._dag_display_property_value + if hasattr(dag, "_dag_display_property_value"): + dm._dag_display_property_value = dag._dag_display_property_value + elif dag.dag_display_name != dag.dag_id: + dm._dag_display_property_value = dag.dag_display_name dm.description = dag.description dm.max_active_tasks = dag.max_active_tasks dm.max_active_runs = dag.max_active_runs @@ -223,7 +225,7 @@ def update_dags( ) dm.timetable_summary = dag.timetable.summary dm.timetable_description = dag.timetable.description - dm.dataset_expression = dag.timetable.asset_condition.as_expression() + dm.asset_expression = dag.timetable.asset_condition.as_expression() dm.processor_subdir = processor_subdir last_automated_run: DagRun | None = run_info.latest_runs.get(dag.dag_id) @@ -237,8 +239,8 @@ def update_dags( dm.calculate_dagrun_date_fields(dag, last_automated_data_interval) if not dag.timetable.asset_condition: - dm.schedule_dataset_references = [] - dm.schedule_dataset_alias_references = [] + dm.schedule_asset_references = [] + dm.schedule_asset_alias_references = [] # FIXME: STORE NEW REFERENCES. 
if dag.tags: @@ -277,7 +279,7 @@ class AssetModelOperation(NamedTuple): schedule_asset_references: dict[str, list[Asset]] schedule_asset_alias_references: dict[str, list[AssetAlias]] outlet_references: dict[str, list[tuple[str, Asset]]] - assets: dict[str, Asset] + assets: dict[tuple[str, str], Asset] asset_aliases: dict[str, AssetAlias] @classmethod @@ -300,22 +302,25 @@ def collect(cls, dags: dict[str, DAG]) -> Self: ] for dag_id, dag in dags.items() }, - assets={asset.uri: asset for asset in _find_all_assets(dags.values())}, + assets={(asset.name, asset.uri): asset for asset in _find_all_assets(dags.values())}, asset_aliases={alias.name: alias for alias in _find_all_asset_aliases(dags.values())}, ) return coll - def add_assets(self, *, session: Session) -> dict[str, AssetModel]: + def add_assets(self, *, session: Session) -> dict[tuple[str, str], AssetModel]: # Optimization: skip all database calls if no assets were collected. if not self.assets: return {} - orm_assets: dict[str, AssetModel] = { - am.uri: am for am in session.scalars(select(AssetModel).where(AssetModel.uri.in_(self.assets))) + orm_assets: dict[tuple[str, str], AssetModel] = { + (am.name, am.uri): am + for am in session.scalars( + select(AssetModel).where(tuple_(AssetModel.name, AssetModel.uri).in_(self.assets)) + ) } orm_assets.update( - (model.uri, model) + ((model.name, model.uri), model) for model in asset_manager.create_assets( - [asset for uri, asset in self.assets.items() if uri not in orm_assets], + [asset for name_uri, asset in self.assets.items() if name_uri not in orm_assets], session=session, ) ) @@ -340,24 +345,10 @@ def add_asset_aliases(self, *, session: Session) -> dict[str, AssetAliasModel]: ) return orm_aliases - def add_asset_active_references(self, assets: Collection[AssetModel], *, session: Session) -> None: - existing_entries = set( - session.execute( - select(AssetActive.name, AssetActive.uri).where( - tuple_(AssetActive.name, AssetActive.uri).in_((asset.name, asset.uri) for asset in assets) - ) - ) - ) - session.add_all( - AssetActive.for_asset(asset) - for asset in assets - if (asset.name, asset.uri) not in existing_entries - ) - def add_dag_asset_references( self, dags: dict[str, DagModel], - assets: dict[str, AssetModel], + assets: dict[tuple[str, str], AssetModel], *, session: Session, ) -> None: @@ -367,15 +358,15 @@ def add_dag_asset_references( for dag_id, references in self.schedule_asset_references.items(): # Optimization: no references at all; this is faster than repeated delete(). if not references: - dags[dag_id].schedule_dataset_references = [] + dags[dag_id].schedule_asset_references = [] continue - referenced_asset_ids = {asset.id for asset in (assets[r.uri] for r in references)} - orm_refs = {r.dataset_id: r for r in dags[dag_id].schedule_dataset_references} + referenced_asset_ids = {asset.id for asset in (assets[r.name, r.uri] for r in references)} + orm_refs = {r.asset_id: r for r in dags[dag_id].schedule_asset_references} for asset_id, ref in orm_refs.items(): if asset_id not in referenced_asset_ids: session.delete(ref) session.bulk_save_objects( - DagScheduleAssetReference(dataset_id=asset_id, dag_id=dag_id) + DagScheduleAssetReference(asset_id=asset_id, dag_id=dag_id) for asset_id in referenced_asset_ids if asset_id not in orm_refs ) @@ -393,10 +384,10 @@ def add_dag_asset_alias_references( for dag_id, references in self.schedule_asset_alias_references.items(): # Optimization: no references at all; this is faster than repeated delete(). 
if not references: - dags[dag_id].schedule_dataset_alias_references = [] + dags[dag_id].schedule_asset_alias_references = [] continue referenced_alias_ids = {alias.id for alias in (aliases[r.name] for r in references)} - orm_refs = {a.alias_id: a for a in dags[dag_id].schedule_dataset_alias_references} + orm_refs = {a.alias_id: a for a in dags[dag_id].schedule_asset_alias_references} for alias_id, ref in orm_refs.items(): if alias_id not in referenced_alias_ids: session.delete(ref) @@ -409,7 +400,7 @@ def add_dag_asset_alias_references( def add_task_asset_references( self, dags: dict[str, DagModel], - assets: dict[str, AssetModel], + assets: dict[tuple[str, str], AssetModel], *, session: Session, ) -> None: @@ -419,18 +410,18 @@ def add_task_asset_references( for dag_id, references in self.outlet_references.items(): # Optimization: no references at all; this is faster than repeated delete(). if not references: - dags[dag_id].task_outlet_dataset_references = [] + dags[dag_id].task_outlet_asset_references = [] continue referenced_outlets = { (task_id, asset.id) - for task_id, asset in ((task_id, assets[d.uri]) for task_id, d in references) + for task_id, asset in ((task_id, assets[d.name, d.uri]) for task_id, d in references) } - orm_refs = {(r.task_id, r.dataset_id): r for r in dags[dag_id].task_outlet_dataset_references} + orm_refs = {(r.task_id, r.asset_id): r for r in dags[dag_id].task_outlet_asset_references} for key, ref in orm_refs.items(): if key not in referenced_outlets: session.delete(ref) session.bulk_save_objects( - TaskOutletAssetReference(dataset_id=asset_id, dag_id=dag_id, task_id=task_id) + TaskOutletAssetReference(asset_id=asset_id, dag_id=dag_id, task_id=task_id) for task_id, asset_id in referenced_outlets if (task_id, asset_id) not in orm_refs ) diff --git a/airflow/dag_processing/processor.py b/airflow/dag_processing/processor.py index f030cb75019e5..8694f5890ccd8 100644 --- a/airflow/dag_processing/processor.py +++ b/airflow/dag_processing/processor.py @@ -28,7 +28,7 @@ from typing import TYPE_CHECKING, Generator, Iterable from setproctitle import setproctitle -from sqlalchemy import delete, event +from sqlalchemy import delete, event, select from airflow import settings from airflow.api_internal.internal_api_call import internal_api_call @@ -533,7 +533,14 @@ def _validate_task_pools_and_update_dag_warnings( ) ) - stored_warnings = set(session.query(DagWarning).filter(DagWarning.dag_id.in_(dag_ids)).all()) + stored_warnings = set( + session.scalars( + select(DagWarning).where( + DagWarning.dag_id.in_(dag_ids), + DagWarning.warning_type == DagWarningType.NONEXISTENT_POOL, + ) + ) + ) for warning_to_delete in stored_warnings - warnings: session.delete(warning_to_delete) diff --git a/airflow/datasets/__init__.py b/airflow/datasets/__init__.py new file mode 100644 index 0000000000000..34729e437805b --- /dev/null +++ b/airflow/datasets/__init__.py @@ -0,0 +1,45 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# We do not use "from __future__ import annotations" here because it is not supported +# by Pycharm when we want to make sure all imports in airflow work from namespace packages +# Adding it automatically is excluded in pyproject.toml via I002 ruff rule exclusion + +# Make `airflow` a namespace package, supporting installing +# airflow.providers.* in different locations (i.e. one in site, and one in user +# lib.) This is required by some IDEs to resolve the import paths. +from __future__ import annotations + +import warnings + +from airflow.assets import AssetAlias as DatasetAlias, Dataset + +# TODO: Remove this module in Airflow 3.2 + +warnings.warn( + "Import from the airflow.datasets module is deprecated and " + "will be removed in Airflow 3.2. Please import it from 'airflow.assets'.", + DeprecationWarning, + stacklevel=2, +) + + +__all__ = [ + "Dataset", + "DatasetAlias", +] diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py index bb9602d50c1cd..1c9e441190a06 100644 --- a/airflow/decorators/base.py +++ b/airflow/decorators/base.py @@ -41,7 +41,6 @@ import typing_extensions from airflow.assets import Asset -from airflow.models.abstractoperator import DEFAULT_RETRIES, DEFAULT_RETRY_DELAY from airflow.models.baseoperator import ( BaseOperator, coerce_resources, @@ -49,7 +48,6 @@ get_merged_defaults, parse_retries, ) -from airflow.models.dag import DagContext from airflow.models.expandinput import ( EXPAND_INPUT_EMPTY, DictOfListsExpandInput, @@ -57,27 +55,27 @@ is_mappable, ) from airflow.models.mappedoperator import MappedOperator, ensure_xcomarg_return_value -from airflow.models.pool import Pool from airflow.models.xcom_arg import XComArg +from airflow.sdk.definitions.baseoperator import BaseOperator as TaskSDKBaseOperator +from airflow.sdk.definitions.contextmanager import DagContext, TaskGroupContext from airflow.typing_compat import ParamSpec, Protocol from airflow.utils import timezone from airflow.utils.context import KNOWN_CONTEXT_KEYS from airflow.utils.decorators import remove_task_decorator from airflow.utils.helpers import prevent_duplicates -from airflow.utils.task_group import TaskGroupContext from airflow.utils.trigger_rule import TriggerRule from airflow.utils.types import NOTSET if TYPE_CHECKING: from sqlalchemy.orm import Session - from airflow.models.dag import DAG from airflow.models.expandinput import ( ExpandInput, OperatorExpandArgument, OperatorExpandKwargsArgument, ) from airflow.models.mappedoperator import ValidationSource + from airflow.sdk import DAG from airflow.utils.context import Context from airflow.utils.task_group import TaskGroup @@ -141,13 +139,13 @@ def get_unique_task_id( ...
task_id__20 """ - dag = dag or DagContext.get_current_dag() + dag = dag or DagContext.get_current() if not dag: return task_id # We need to check if we are in the context of TaskGroup as the task_id may # already be altered - task_group = task_group or TaskGroupContext.get_current_task_group(dag) + task_group = task_group or TaskGroupContext.get_current(dag) tg_task_id = task_group.child_id(task_id) if task_group else task_id if tg_task_id not in dag.task_ids: @@ -428,8 +426,8 @@ def _expand(self, expand_input: ExpandInput, *, strict: bool) -> XComArg: ensure_xcomarg_return_value(expand_input.value) task_kwargs = self.kwargs.copy() - dag = task_kwargs.pop("dag", None) or DagContext.get_current_dag() - task_group = task_kwargs.pop("task_group", None) or TaskGroupContext.get_current_task_group(dag) + dag = task_kwargs.pop("dag", None) or DagContext.get_current() + task_group = task_kwargs.pop("task_group", None) or TaskGroupContext.get_current(dag) default_args, partial_params = get_merged_defaults( dag=dag, @@ -442,7 +440,7 @@ def _expand(self, expand_input: ExpandInput, *, strict: bool) -> XComArg: "is_teardown": self.is_teardown, "on_failure_fail_dagrun": self.on_failure_fail_dagrun, } - base_signature = inspect.signature(BaseOperator) + base_signature = inspect.signature(TaskSDKBaseOperator) ignore = { "default_args", # This is target we are working on now. "kwargs", # A common name for a keyword argument. @@ -460,32 +458,26 @@ def _expand(self, expand_input: ExpandInput, *, strict: bool) -> XComArg: task_id = task_group.child_id(task_id) # Logic here should be kept in sync with BaseOperatorMeta.partial(). - if "task_concurrency" in partial_kwargs: - raise TypeError("unexpected argument: task_concurrency") if partial_kwargs.get("wait_for_downstream"): partial_kwargs["depends_on_past"] = True start_date = timezone.convert_to_utc(partial_kwargs.pop("start_date", None)) end_date = timezone.convert_to_utc(partial_kwargs.pop("end_date", None)) - if partial_kwargs.get("pool") is None: - partial_kwargs["pool"] = Pool.DEFAULT_POOL_NAME if "pool_slots" in partial_kwargs: if partial_kwargs["pool_slots"] < 1: dag_str = "" if dag: dag_str = f" in dag {dag.dag_id}" raise ValueError(f"pool slots for {task_id}{dag_str} cannot be less than 1") - partial_kwargs["retries"] = parse_retries(partial_kwargs.get("retries", DEFAULT_RETRIES)) - partial_kwargs["retry_delay"] = coerce_timedelta( - partial_kwargs.get("retry_delay", DEFAULT_RETRY_DELAY), - key="retry_delay", - ) - max_retry_delay = partial_kwargs.get("max_retry_delay") - partial_kwargs["max_retry_delay"] = ( - max_retry_delay - if max_retry_delay is None - else coerce_timedelta(max_retry_delay, key="max_retry_delay") - ) - partial_kwargs["resources"] = coerce_resources(partial_kwargs.get("resources")) + + for fld, convert in ( + ("retries", parse_retries), + ("retry_delay", coerce_timedelta), + ("max_retry_delay", coerce_timedelta), + ("resources", coerce_resources), + ): + if (v := partial_kwargs.get(fld, NOTSET)) is not NOTSET: + partial_kwargs[fld] = convert(v) # type: ignore[operator] + partial_kwargs.setdefault("executor_config", {}) partial_kwargs.setdefault("op_args", []) partial_kwargs.setdefault("op_kwargs", {}) diff --git a/airflow/decorators/bash.py b/airflow/decorators/bash.py index 44738492da098..e4dc19745e0ab 100644 --- a/airflow/decorators/bash.py +++ b/airflow/decorators/bash.py @@ -18,7 +18,7 @@ from __future__ import annotations import warnings -from typing import Any, Callable, Collection, Mapping, Sequence +from typing import 
Any, Callable, ClassVar, Collection, Mapping, Sequence from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory from airflow.providers.standard.operators.bash import BashOperator @@ -39,7 +39,7 @@ class _BashDecoratedOperator(DecoratedOperator, BashOperator): """ template_fields: Sequence[str] = (*DecoratedOperator.template_fields, *BashOperator.template_fields) - template_fields_renderers: dict[str, str] = { + template_fields_renderers: ClassVar[dict[str, str]] = { **DecoratedOperator.template_fields_renderers, **BashOperator.template_fields_renderers, } diff --git a/airflow/decorators/branch_external_python.py b/airflow/decorators/branch_external_python.py index 2902a47c67741..dbba01034ff5e 100644 --- a/airflow/decorators/branch_external_python.py +++ b/airflow/decorators/branch_external_python.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import BranchExternalPythonOperator +from airflow.providers.standard.operators.python import BranchExternalPythonOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/branch_python.py b/airflow/decorators/branch_python.py index 31750ef657a94..3d955a480b75b 100644 --- a/airflow/decorators/branch_python.py +++ b/airflow/decorators/branch_python.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import BranchPythonOperator +from airflow.providers.standard.operators.python import BranchPythonOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/branch_virtualenv.py b/airflow/decorators/branch_virtualenv.py index c96638ee20246..c2c39cfe58d31 100644 --- a/airflow/decorators/branch_virtualenv.py +++ b/airflow/decorators/branch_virtualenv.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import BranchPythonVirtualenvOperator +from airflow.providers.standard.operators.python import BranchPythonVirtualenvOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/external_python.py b/airflow/decorators/external_python.py index 2d8e2603f94dd..e57fccac141a6 100644 --- a/airflow/decorators/external_python.py +++ b/airflow/decorators/external_python.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import ExternalPythonOperator +from airflow.providers.standard.operators.python import ExternalPythonOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/python.py b/airflow/decorators/python.py index 7a890cf862270..b65a4a9667009 100644 --- a/airflow/decorators/python.py +++ b/airflow/decorators/python.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING, Callable, Sequence from airflow.decorators.base import DecoratedOperator, task_decorator_factory -from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.python import PythonOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/python_virtualenv.py b/airflow/decorators/python_virtualenv.py 
index d0eb93a0d7aa6..869d61692d11e 100644 --- a/airflow/decorators/python_virtualenv.py +++ b/airflow/decorators/python_virtualenv.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import PythonVirtualenvOperator +from airflow.providers.standard.operators.python import PythonVirtualenvOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/sensor.py b/airflow/decorators/sensor.py index c332a78f95c73..5d409c2d599d8 100644 --- a/airflow/decorators/sensor.py +++ b/airflow/decorators/sensor.py @@ -17,10 +17,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Callable, Sequence +from typing import TYPE_CHECKING, Callable, ClassVar, Sequence from airflow.decorators.base import get_unique_task_id, task_decorator_factory -from airflow.sensors.python import PythonSensor +from airflow.providers.standard.sensors.python import PythonSensor if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator @@ -42,7 +42,7 @@ class DecoratedSensorOperator(PythonSensor): """ template_fields: Sequence[str] = ("op_args", "op_kwargs") - template_fields_renderers: dict[str, str] = {"op_args": "py", "op_kwargs": "py"} + template_fields_renderers: ClassVar[dict[str, str]] = {"op_args": "py", "op_kwargs": "py"} custom_operator_name = "@task.sensor" diff --git a/airflow/decorators/short_circuit.py b/airflow/decorators/short_circuit.py index c964ed6bb75fd..89fa6ac5ac6de 100644 --- a/airflow/decorators/short_circuit.py +++ b/airflow/decorators/short_circuit.py @@ -20,7 +20,7 @@ from airflow.decorators.base import task_decorator_factory from airflow.decorators.python import _PythonDecoratedOperator -from airflow.operators.python import ShortCircuitOperator +from airflow.providers.standard.operators.python import ShortCircuitOperator if TYPE_CHECKING: from airflow.decorators.base import TaskDecorator diff --git a/airflow/decorators/task_group.py b/airflow/decorators/task_group.py index 6eee426e936ae..daaa81e1ce62a 100644 --- a/airflow/decorators/task_group.py +++ b/airflow/decorators/task_group.py @@ -38,8 +38,8 @@ ListOfDictsExpandInput, MappedArgument, ) -from airflow.models.taskmixin import DAGNode from airflow.models.xcom_arg import XComArg +from airflow.sdk.definitions.node import DAGNode from airflow.typing_compat import ParamSpec from airflow.utils.helpers import prevent_duplicates from airflow.utils.task_group import MappedTaskGroup, TaskGroup diff --git a/airflow/example_dags/example_asset_alias_with_no_taskflow.py b/airflow/example_dags/example_asset_alias_with_no_taskflow.py index 3293f7e45bb94..c9b04d66d2f63 100644 --- a/airflow/example_dags/example_asset_alias_with_no_taskflow.py +++ b/airflow/example_dags/example_asset_alias_with_no_taskflow.py @@ -37,7 +37,7 @@ from airflow import DAG from airflow.assets import Asset, AssetAlias -from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.python import PythonOperator with DAG( dag_id="asset_s3_bucket_producer_with_no_taskflow", diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py index 492d6315d1f22..35f7a9b548e56 100644 --- a/airflow/example_dags/example_branch_operator.py +++ b/airflow/example_dags/example_branch_operator.py @@ -29,143 +29,140 @@ import pendulum -from airflow.operators.python import is_venv_installed - -if is_venv_installed(): - from 
airflow.models.dag import DAG - from airflow.operators.empty import EmptyOperator - from airflow.operators.python import ( - BranchExternalPythonOperator, - BranchPythonOperator, - BranchPythonVirtualenvOperator, - ExternalPythonOperator, - PythonOperator, - PythonVirtualenvOperator, +from airflow.models.dag import DAG +from airflow.operators.empty import EmptyOperator +from airflow.providers.standard.operators.python import ( + BranchExternalPythonOperator, + BranchPythonOperator, + BranchPythonVirtualenvOperator, + ExternalPythonOperator, + PythonOperator, + PythonVirtualenvOperator, +) +from airflow.utils.edgemodifier import Label +from airflow.utils.trigger_rule import TriggerRule + +PATH_TO_PYTHON_BINARY = sys.executable + +with DAG( + dag_id="example_branch_operator", + start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), + catchup=False, + schedule="@daily", + tags=["example", "example2"], + orientation="TB", +) as dag: + run_this_first = EmptyOperator( + task_id="run_this_first", ) - from airflow.utils.edgemodifier import Label - from airflow.utils.trigger_rule import TriggerRule - - PATH_TO_PYTHON_BINARY = sys.executable - - with DAG( - dag_id="example_branch_operator", - start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), - catchup=False, - schedule="@daily", - tags=["example", "example2"], - orientation="TB", - ) as dag: - run_this_first = EmptyOperator( - task_id="run_this_first", - ) - options = ["a", "b", "c", "d"] + options = ["a", "b", "c", "d"] + + # Example branching on standard Python tasks - # Example branching on standard Python tasks + # [START howto_operator_branch_python] + branching = BranchPythonOperator( + task_id="branching", + python_callable=lambda: f"branch_{random.choice(options)}", + ) + # [END howto_operator_branch_python] + run_this_first >> branching + + join = EmptyOperator( + task_id="join", + trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, + ) - # [START howto_operator_branch_python] - branching = BranchPythonOperator( - task_id="branching", - python_callable=lambda: f"branch_{random.choice(options)}", + for option in options: + t = PythonOperator( + task_id=f"branch_{option}", + python_callable=lambda: print("Hello World"), ) - # [END howto_operator_branch_python] - run_this_first >> branching - join = EmptyOperator( - task_id="join", - trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, + empty_follow = EmptyOperator( + task_id="follow_" + option, ) - for option in options: - t = PythonOperator( - task_id=f"branch_{option}", - python_callable=lambda: print("Hello World"), - ) + # Label is optional here, but it can help identify more complex branches + branching >> Label(option) >> t >> empty_follow >> join - empty_follow = EmptyOperator( - task_id="follow_" + option, - ) + # Example the same with external Python calls - # Label is optional here, but it can help identify more complex branches - branching >> Label(option) >> t >> empty_follow >> join + # [START howto_operator_branch_ext_py] + def branch_with_external_python(choices): + import random - # Example the same with external Python calls + return f"ext_py_{random.choice(choices)}" - # [START howto_operator_branch_ext_py] - def branch_with_external_python(choices): - import random + branching_ext_py = BranchExternalPythonOperator( + task_id="branching_ext_python", + python=PATH_TO_PYTHON_BINARY, + python_callable=branch_with_external_python, + op_args=[options], + ) + # [END howto_operator_branch_ext_py] + join >> branching_ext_py + + join_ext_py = EmptyOperator( + 
task_id="join_ext_python", + trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, + ) - return f"ext_py_{random.choice(choices)}" + def hello_world_with_external_python(): + print("Hello World from external Python") - branching_ext_py = BranchExternalPythonOperator( - task_id="branching_ext_python", + for option in options: + t = ExternalPythonOperator( + task_id=f"ext_py_{option}", python=PATH_TO_PYTHON_BINARY, - python_callable=branch_with_external_python, - op_args=[options], + python_callable=hello_world_with_external_python, ) - # [END howto_operator_branch_ext_py] - join >> branching_ext_py - join_ext_py = EmptyOperator( - task_id="join_ext_python", - trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, - ) + # Label is optional here, but it can help identify more complex branches + branching_ext_py >> Label(option) >> t >> join_ext_py - def hello_world_with_external_python(): - print("Hello World from external Python") + # Example the same with Python virtual environments - for option in options: - t = ExternalPythonOperator( - task_id=f"ext_py_{option}", - python=PATH_TO_PYTHON_BINARY, - python_callable=hello_world_with_external_python, - ) + # [START howto_operator_branch_virtualenv] + # Note: Passing a caching dir allows to keep the virtual environment over multiple runs + # Run the example a second time and see that it re-uses it and is faster. + VENV_CACHE_PATH = Path(tempfile.gettempdir()) - # Label is optional here, but it can help identify more complex branches - branching_ext_py >> Label(option) >> t >> join_ext_py + def branch_with_venv(choices): + import random - # Example the same with Python virtual environments + import numpy as np - # [START howto_operator_branch_virtualenv] - # Note: Passing a caching dir allows to keep the virtual environment over multiple runs - # Run the example a second time and see that it re-uses it and is faster. 
- VENV_CACHE_PATH = Path(tempfile.gettempdir()) + print(f"Some numpy stuff: {np.arange(6)}") + return f"venv_{random.choice(choices)}" - def branch_with_venv(choices): - import random + branching_venv = BranchPythonVirtualenvOperator( + task_id="branching_venv", + requirements=["numpy~=1.26.0"], + venv_cache_path=VENV_CACHE_PATH, + python_callable=branch_with_venv, + op_args=[options], + ) + # [END howto_operator_branch_virtualenv] + join_ext_py >> branching_venv + + join_venv = EmptyOperator( + task_id="join_venv", + trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, + ) - import numpy as np + def hello_world_with_venv(): + import numpy as np - print(f"Some numpy stuff: {np.arange(6)}") - return f"venv_{random.choice(choices)}" + print(f"Hello World with some numpy stuff: {np.arange(6)}") - branching_venv = BranchPythonVirtualenvOperator( - task_id="branching_venv", + for option in options: + t = PythonVirtualenvOperator( + task_id=f"venv_{option}", requirements=["numpy~=1.26.0"], venv_cache_path=VENV_CACHE_PATH, - python_callable=branch_with_venv, - op_args=[options], - ) - # [END howto_operator_branch_virtualenv] - join_ext_py >> branching_venv - - join_venv = EmptyOperator( - task_id="join_venv", - trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS, + python_callable=hello_world_with_venv, ) - def hello_world_with_venv(): - import numpy as np - - print(f"Hello World with some numpy stuff: {np.arange(6)}") - - for option in options: - t = PythonVirtualenvOperator( - task_id=f"venv_{option}", - requirements=["numpy~=1.26.0"], - venv_cache_path=VENV_CACHE_PATH, - python_callable=hello_world_with_venv, - ) - - # Label is optional here, but it can help identify more complex branches - branching_venv >> Label(option) >> t >> join_venv + # Label is optional here, but it can help identify more complex branches + branching_venv >> Label(option) >> t >> join_venv diff --git a/airflow/example_dags/example_branch_operator_decorator.py b/airflow/example_dags/example_branch_operator_decorator.py index 59cb3b2919475..e9b3bea97a72f 100644 --- a/airflow/example_dags/example_branch_operator_decorator.py +++ b/airflow/example_dags/example_branch_operator_decorator.py @@ -30,121 +30,116 @@ import pendulum -from airflow.operators.python import is_venv_installed +from airflow.decorators import task +from airflow.models.dag import DAG +from airflow.operators.empty import EmptyOperator +from airflow.utils.edgemodifier import Label +from airflow.utils.trigger_rule import TriggerRule -if is_venv_installed(): - from airflow.decorators import task - from airflow.models.dag import DAG - from airflow.operators.empty import EmptyOperator - from airflow.utils.edgemodifier import Label - from airflow.utils.trigger_rule import TriggerRule +PATH_TO_PYTHON_BINARY = sys.executable - PATH_TO_PYTHON_BINARY = sys.executable +with DAG( + dag_id="example_branch_python_operator_decorator", + start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), + catchup=False, + schedule="@daily", + tags=["example", "example2"], + orientation="TB", +) as dag: + run_this_first = EmptyOperator(task_id="run_this_first") - with DAG( - dag_id="example_branch_python_operator_decorator", - start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), - catchup=False, - schedule="@daily", - tags=["example", "example2"], - orientation="TB", - ) as dag: - run_this_first = EmptyOperator(task_id="run_this_first") + options = ["a", "b", "c", "d"] - options = ["a", "b", "c", "d"] + # Example branching on standard Python tasks - # Example branching on standard 
Python tasks + # [START howto_operator_branch_python] + @task.branch() + def branching(choices: list[str]) -> str: + return f"branch_{random.choice(choices)}" - # [START howto_operator_branch_python] - @task.branch() - def branching(choices: list[str]) -> str: - return f"branch_{random.choice(choices)}" + # [END howto_operator_branch_python] - # [END howto_operator_branch_python] + random_choice_instance = branching(choices=options) - random_choice_instance = branching(choices=options) + run_this_first >> random_choice_instance - run_this_first >> random_choice_instance + join = EmptyOperator(task_id="join", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) - join = EmptyOperator(task_id="join", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) + for option in options: - for option in options: + @task(task_id=f"branch_{option}") + def some_task(): + print("doing something in Python") - @task(task_id=f"branch_{option}") - def some_task(): - print("doing something in Python") + t = some_task() + empty = EmptyOperator(task_id=f"follow_{option}") - t = some_task() - empty = EmptyOperator(task_id=f"follow_{option}") + # Label is optional here, but it can help identify more complex branches + random_choice_instance >> Label(option) >> t >> empty >> join - # Label is optional here, but it can help identify more complex branches - random_choice_instance >> Label(option) >> t >> empty >> join + # Example the same with external Python calls - # Example the same with external Python calls + # [START howto_operator_branch_ext_py] + @task.branch_external_python(python=PATH_TO_PYTHON_BINARY) + def branching_ext_python(choices) -> str: + import random - # [START howto_operator_branch_ext_py] - @task.branch_external_python(python=PATH_TO_PYTHON_BINARY) - def branching_ext_python(choices) -> str: - import random + return f"ext_py_{random.choice(choices)}" - return f"ext_py_{random.choice(choices)}" + # [END howto_operator_branch_ext_py] - # [END howto_operator_branch_ext_py] + random_choice_ext_py = branching_ext_python(choices=options) - random_choice_ext_py = branching_ext_python(choices=options) + join >> random_choice_ext_py - join >> random_choice_ext_py + join_ext_py = EmptyOperator(task_id="join_ext_py", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) - join_ext_py = EmptyOperator( - task_id="join_ext_py", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS - ) - - for option in options: + for option in options: - @task.external_python(task_id=f"ext_py_{option}", python=PATH_TO_PYTHON_BINARY) - def some_ext_py_task(): - print("doing something in external Python") + @task.external_python(task_id=f"ext_py_{option}", python=PATH_TO_PYTHON_BINARY) + def some_ext_py_task(): + print("doing something in external Python") - t = some_ext_py_task() + t = some_ext_py_task() - # Label is optional here, but it can help identify more complex branches - random_choice_ext_py >> Label(option) >> t >> join_ext_py + # Label is optional here, but it can help identify more complex branches + random_choice_ext_py >> Label(option) >> t >> join_ext_py - # Example the same with Python virtual environments + # Example the same with Python virtual environments - # [START howto_operator_branch_virtualenv] - # Note: Passing a caching dir allows to keep the virtual environment over multiple runs - # Run the example a second time and see that it re-uses it and is faster. 
- VENV_CACHE_PATH = tempfile.gettempdir() + # [START howto_operator_branch_virtualenv] + # Note: Passing a caching dir allows to keep the virtual environment over multiple runs + # Run the example a second time and see that it re-uses it and is faster. + VENV_CACHE_PATH = tempfile.gettempdir() - @task.branch_virtualenv(requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH) - def branching_virtualenv(choices) -> str: - import random + @task.branch_virtualenv(requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH) + def branching_virtualenv(choices) -> str: + import random - import numpy as np + import numpy as np - print(f"Some numpy stuff: {np.arange(6)}") - return f"venv_{random.choice(choices)}" + print(f"Some numpy stuff: {np.arange(6)}") + return f"venv_{random.choice(choices)}" - # [END howto_operator_branch_virtualenv] + # [END howto_operator_branch_virtualenv] - random_choice_venv = branching_virtualenv(choices=options) + random_choice_venv = branching_virtualenv(choices=options) - join_ext_py >> random_choice_venv + join_ext_py >> random_choice_venv - join_venv = EmptyOperator(task_id="join_venv", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) + join_venv = EmptyOperator(task_id="join_venv", trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS) - for option in options: + for option in options: - @task.virtualenv( - task_id=f"venv_{option}", requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH - ) - def some_venv_task(): - import numpy as np + @task.virtualenv( + task_id=f"venv_{option}", requirements=["numpy~=1.24.4"], venv_cache_path=VENV_CACHE_PATH + ) + def some_venv_task(): + import numpy as np - print(f"Some numpy stuff: {np.arange(6)}") + print(f"Some numpy stuff: {np.arange(6)}") - t = some_venv_task() + t = some_venv_task() - # Label is optional here, but it can help identify more complex branches - random_choice_venv >> Label(option) >> t >> join_venv + # Label is optional here, but it can help identify more complex branches + random_choice_venv >> Label(option) >> t >> join_venv diff --git a/airflow/example_dags/example_python_context_decorator.py b/airflow/example_dags/example_python_context_decorator.py index 497ee08e17cea..9cfc318757420 100644 --- a/airflow/example_dags/example_python_context_decorator.py +++ b/airflow/example_dags/example_python_context_decorator.py @@ -45,7 +45,7 @@ def print_context() -> str: """Print the Airflow context.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) @@ -60,7 +60,7 @@ def print_context_venv() -> str: """Print the Airflow context in venv.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) @@ -77,7 +77,7 @@ def print_context_external() -> str: """Print the Airflow context in external python.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) diff --git a/airflow/example_dags/example_python_context_operator.py b/airflow/example_dags/example_python_context_operator.py index f1b76c527cfd6..4dc9383dd06d6 100644 --- a/airflow/example_dags/example_python_context_operator.py +++ 
b/airflow/example_dags/example_python_context_operator.py @@ -28,7 +28,11 @@ import pendulum from airflow import DAG -from airflow.operators.python import ExternalPythonOperator, PythonOperator, PythonVirtualenvOperator +from airflow.providers.standard.operators.python import ( + ExternalPythonOperator, + PythonOperator, + PythonVirtualenvOperator, +) SOME_EXTERNAL_PYTHON = sys.executable @@ -44,7 +48,7 @@ def print_context() -> str: """Print the Airflow context.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) @@ -58,7 +62,7 @@ def print_context_venv() -> str: """Print the Airflow context in venv.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) @@ -74,7 +78,7 @@ def print_context_external() -> str: """Print the Airflow context in external python.""" from pprint import pprint - from airflow.operators.python import get_current_context + from airflow.providers.standard.operators.python import get_current_context context = get_current_context() pprint(context) diff --git a/airflow/example_dags/example_python_decorator.py b/airflow/example_dags/example_python_decorator.py index 264fc4333349f..7619bc3b6a517 100644 --- a/airflow/example_dags/example_python_decorator.py +++ b/airflow/example_dags/example_python_decorator.py @@ -30,7 +30,6 @@ import pendulum from airflow.decorators import dag, task -from airflow.operators.python import is_venv_installed log = logging.getLogger(__name__) @@ -76,61 +75,58 @@ def my_sleeping_function(random_base): run_this >> log_the_sql >> sleeping_task # [END howto_operator_python_kwargs] - if not is_venv_installed(): - log.warning("The virtalenv_python example task requires virtualenv, please install it.") - else: - # [START howto_operator_python_venv] - @task.virtualenv( - task_id="virtualenv_python", requirements=["colorama==0.4.0"], system_site_packages=False - ) - def callable_virtualenv(): - """ - Example function that will be performed in a virtual environment. - - Importing at the module level ensures that it will not attempt to import the - library before it is installed. - """ - from time import sleep - - from colorama import Back, Fore, Style - - print(Fore.RED + "some red text") - print(Back.GREEN + "and with a green background") - print(Style.DIM + "and in dim text") - print(Style.RESET_ALL) - for _ in range(4): - print(Style.DIM + "Please wait...", flush=True) - sleep(1) - print("Finished") - - virtualenv_task = callable_virtualenv() - # [END howto_operator_python_venv] - - sleeping_task >> virtualenv_task - - # [START howto_operator_external_python] - @task.external_python(task_id="external_python", python=PATH_TO_PYTHON_BINARY) - def callable_external_python(): - """ - Example function that will be performed in a virtual environment. - - Importing at the module level ensures that it will not attempt to import the - library before it is installed. 
- """ - import sys - from time import sleep - - print(f"Running task via {sys.executable}") - print("Sleeping") - for _ in range(4): - print("Please wait...", flush=True) - sleep(1) - print("Finished") - - external_python_task = callable_external_python() - # [END howto_operator_external_python] - - run_this >> external_python_task >> virtualenv_task + # [START howto_operator_python_venv] + @task.virtualenv( + task_id="virtualenv_python", requirements=["colorama==0.4.0"], system_site_packages=False + ) + def callable_virtualenv(): + """ + Example function that will be performed in a virtual environment. + + Importing at the module level ensures that it will not attempt to import the + library before it is installed. + """ + from time import sleep + + from colorama import Back, Fore, Style + + print(Fore.RED + "some red text") + print(Back.GREEN + "and with a green background") + print(Style.DIM + "and in dim text") + print(Style.RESET_ALL) + for _ in range(4): + print(Style.DIM + "Please wait...", flush=True) + sleep(1) + print("Finished") + + virtualenv_task = callable_virtualenv() + # [END howto_operator_python_venv] + + sleeping_task >> virtualenv_task + + # [START howto_operator_external_python] + @task.external_python(task_id="external_python", python=PATH_TO_PYTHON_BINARY) + def callable_external_python(): + """ + Example function that will be performed in a virtual environment. + + Importing at the module level ensures that it will not attempt to import the + library before it is installed. + """ + import sys + from time import sleep + + print(f"Running task via {sys.executable}") + print("Sleeping") + for _ in range(4): + print("Please wait...", flush=True) + sleep(1) + print("Finished") + + external_python_task = callable_external_python() + # [END howto_operator_external_python] + + run_this >> external_python_task >> virtualenv_task example_python_decorator() diff --git a/airflow/example_dags/example_python_operator.py b/airflow/example_dags/example_python_operator.py index a1ebb84ddff00..976813d53fd98 100644 --- a/airflow/example_dags/example_python_operator.py +++ b/airflow/example_dags/example_python_operator.py @@ -30,11 +30,10 @@ import pendulum from airflow.models.dag import DAG -from airflow.operators.python import ( +from airflow.providers.standard.operators.python import ( ExternalPythonOperator, PythonOperator, PythonVirtualenvOperator, - is_venv_installed, ) log = logging.getLogger(__name__) @@ -89,63 +88,60 @@ def my_sleeping_function(random_base): run_this >> log_the_sql >> sleeping_task # [END howto_operator_python_kwargs] - if not is_venv_installed(): - log.warning("The virtalenv_python example task requires virtualenv, please install it.") - else: - # [START howto_operator_python_venv] - def callable_virtualenv(): - """ - Example function that will be performed in a virtual environment. - - Importing at the function level ensures that it will not attempt to import the - library before it is installed. 
- """ - from time import sleep - - from colorama import Back, Fore, Style - - print(Fore.RED + "some red text") - print(Back.GREEN + "and with a green background") - print(Style.DIM + "and in dim text") - print(Style.RESET_ALL) - for _ in range(4): - print(Style.DIM + "Please wait...", flush=True) - sleep(1) - print("Finished") - - virtualenv_task = PythonVirtualenvOperator( - task_id="virtualenv_python", - python_callable=callable_virtualenv, - requirements=["colorama==0.4.0"], - system_site_packages=False, - ) - # [END howto_operator_python_venv] - - sleeping_task >> virtualenv_task - - # [START howto_operator_external_python] - def callable_external_python(): - """ - Example function that will be performed in a virtual environment. - - Importing at the module level ensures that it will not attempt to import the - library before it is installed. - """ - import sys - from time import sleep - - print(f"Running task via {sys.executable}") - print("Sleeping") - for _ in range(4): - print("Please wait...", flush=True) - sleep(1) - print("Finished") - - external_python_task = ExternalPythonOperator( - task_id="external_python", - python_callable=callable_external_python, - python=PATH_TO_PYTHON_BINARY, - ) - # [END howto_operator_external_python] + # [START howto_operator_python_venv] + def callable_virtualenv(): + """ + Example function that will be performed in a virtual environment. + + Importing at the function level ensures that it will not attempt to import the + library before it is installed. + """ + from time import sleep + + from colorama import Back, Fore, Style + + print(Fore.RED + "some red text") + print(Back.GREEN + "and with a green background") + print(Style.DIM + "and in dim text") + print(Style.RESET_ALL) + for _ in range(4): + print(Style.DIM + "Please wait...", flush=True) + sleep(1) + print("Finished") + + virtualenv_task = PythonVirtualenvOperator( + task_id="virtualenv_python", + python_callable=callable_virtualenv, + requirements=["colorama==0.4.0"], + system_site_packages=False, + ) + # [END howto_operator_python_venv] + + sleeping_task >> virtualenv_task + + # [START howto_operator_external_python] + def callable_external_python(): + """ + Example function that will be performed in a virtual environment. + + Importing at the module level ensures that it will not attempt to import the + library before it is installed. 
+ """ + import sys + from time import sleep + + print(f"Running task via {sys.executable}") + print("Sleeping") + for _ in range(4): + print("Please wait...", flush=True) + sleep(1) + print("Finished") + + external_python_task = ExternalPythonOperator( + task_id="external_python", + python_callable=callable_external_python, + python=PATH_TO_PYTHON_BINARY, + ) + # [END howto_operator_external_python] - run_this >> external_python_task >> virtualenv_task + run_this >> external_python_task >> virtualenv_task diff --git a/airflow/example_dags/example_sensors.py b/airflow/example_dags/example_sensors.py index f639083858101..52b1b84e223e8 100644 --- a/airflow/example_dags/example_sensors.py +++ b/airflow/example_dags/example_sensors.py @@ -24,11 +24,11 @@ from airflow.models.dag import DAG from airflow.providers.standard.operators.bash import BashOperator from airflow.providers.standard.sensors.bash import BashSensor +from airflow.providers.standard.sensors.python import PythonSensor from airflow.providers.standard.sensors.time import TimeSensor, TimeSensorAsync from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync from airflow.providers.standard.sensors.weekday import DayOfWeekSensor from airflow.sensors.filesystem import FileSensor -from airflow.sensors.python import PythonSensor from airflow.utils.trigger_rule import TriggerRule from airflow.utils.weekday import WeekDay diff --git a/airflow/example_dags/example_short_circuit_operator.py b/airflow/example_dags/example_short_circuit_operator.py index 3941ff17f95a1..5ffab8a14bc0f 100644 --- a/airflow/example_dags/example_short_circuit_operator.py +++ b/airflow/example_dags/example_short_circuit_operator.py @@ -24,7 +24,7 @@ from airflow.models.baseoperator import chain from airflow.models.dag import DAG from airflow.operators.empty import EmptyOperator -from airflow.operators.python import ShortCircuitOperator +from airflow.providers.standard.operators.python import ShortCircuitOperator from airflow.utils.trigger_rule import TriggerRule with DAG( diff --git a/airflow/example_dags/tutorial_dag.py b/airflow/example_dags/tutorial_dag.py index 553b194fef0db..0e4f5086efc92 100644 --- a/airflow/example_dags/tutorial_dag.py +++ b/airflow/example_dags/tutorial_dag.py @@ -33,7 +33,7 @@ from airflow.models.dag import DAG # Operators; we need this to operate! 
-from airflow.operators.python import PythonOperator +from airflow.providers.standard.operators.python import PythonOperator # [END import_module] diff --git a/airflow/example_dags/tutorial_taskflow_api_virtualenv.py b/airflow/example_dags/tutorial_taskflow_api_virtualenv.py index 3860876e6e687..fd9ee3e7b5abd 100644 --- a/airflow/example_dags/tutorial_taskflow_api_virtualenv.py +++ b/airflow/example_dags/tutorial_taskflow_api_virtualenv.py @@ -21,67 +21,64 @@ from datetime import datetime from airflow.decorators import dag, task -from airflow.operators.python import is_venv_installed log = logging.getLogger(__name__) -if not is_venv_installed(): - log.warning("The tutorial_taskflow_api_virtualenv example DAG requires virtualenv, please install it.") -else: - @dag(schedule=None, start_date=datetime(2021, 1, 1), catchup=False, tags=["example"]) - def tutorial_taskflow_api_virtualenv(): +@dag(schedule=None, start_date=datetime(2021, 1, 1), catchup=False, tags=["example"]) +def tutorial_taskflow_api_virtualenv(): + """ + ### TaskFlow API example using virtualenv + This is a simple data pipeline example which demonstrates the use of + the TaskFlow API using three simple tasks for Extract, Transform, and Load. + """ + + @task.virtualenv( + serializer="dill", # Use `dill` for advanced serialization. + system_site_packages=False, + requirements=["funcsigs"], + ) + def extract(): + """ + #### Extract task + A simple Extract task to get data ready for the rest of the data + pipeline. In this case, getting data is simulated by reading from a + hardcoded JSON string. + """ + import json + + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + + order_data_dict = json.loads(data_string) + return order_data_dict + + @task(multiple_outputs=True) + def transform(order_data_dict: dict): + """ + #### Transform task + A simple Transform task which takes in the collection of order data and + computes the total order value. """ - ### TaskFlow API example using virtualenv - This is a simple data pipeline example which demonstrates the use of - the TaskFlow API using three simple tasks for Extract, Transform, and Load. + total_order_value = 0 + + for value in order_data_dict.values(): + total_order_value += value + + return {"total_order_value": total_order_value} + + @task() + def load(total_order_value: float): + """ + #### Load task + A simple Load task which takes in the result of the Transform task and + instead of saving it to end user review, just prints it out. """ - @task.virtualenv( - serializer="dill", # Use `dill` for advanced serialization. - system_site_packages=False, - requirements=["funcsigs"], - ) - def extract(): - """ - #### Extract task - A simple Extract task to get data ready for the rest of the data - pipeline. In this case, getting data is simulated by reading from a - hardcoded JSON string. - """ - import json - - data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' - - order_data_dict = json.loads(data_string) - return order_data_dict - - @task(multiple_outputs=True) - def transform(order_data_dict: dict): - """ - #### Transform task - A simple Transform task which takes in the collection of order data and - computes the total order value. 
- """ - total_order_value = 0 - - for value in order_data_dict.values(): - total_order_value += value - - return {"total_order_value": total_order_value} - - @task() - def load(total_order_value: float): - """ - #### Load task - A simple Load task which takes in the result of the Transform task and - instead of saving it to end user review, just prints it out. - """ - - print(f"Total order value is: {total_order_value:.2f}") - - order_data = extract() - order_summary = transform(order_data) - load(order_summary["total_order_value"]) - - tutorial_dag = tutorial_taskflow_api_virtualenv() + print(f"Total order value is: {total_order_value:.2f}") + + order_data = extract() + order_summary = transform(order_data) + load(order_summary["total_order_value"]) + + +tutorial_dag = tutorial_taskflow_api_virtualenv() diff --git a/airflow/example_dags/tutorial_taskflow_templates.py b/airflow/example_dags/tutorial_taskflow_templates.py index 925f60524b5ea..19206bff572a5 100644 --- a/airflow/example_dags/tutorial_taskflow_templates.py +++ b/airflow/example_dags/tutorial_taskflow_templates.py @@ -22,7 +22,7 @@ import pendulum from airflow.decorators import dag, task -from airflow.operators.python import get_current_context +from airflow.providers.standard.operators.python import get_current_context # [END import_module] diff --git a/airflow/exceptions.py b/airflow/exceptions.py index ccf62ca5e8178..316fe880b66bd 100644 --- a/airflow/exceptions.py +++ b/airflow/exceptions.py @@ -31,7 +31,7 @@ import datetime from collections.abc import Sized - from airflow.models import DAG, DagRun + from airflow.models import DagRun class AirflowException(Exception): @@ -273,13 +273,13 @@ class FailStopDagInvalidTriggerRule(AirflowException): _allowed_rules = (TriggerRule.ALL_SUCCESS, TriggerRule.ALL_DONE_SETUP_SUCCESS) @classmethod - def check(cls, *, dag: DAG | None, trigger_rule: TriggerRule): + def check(cls, *, fail_stop: bool, trigger_rule: TriggerRule): """ Check that fail_stop dag tasks have allowable trigger rules. 
:meta private: """ - if dag is not None and dag.fail_stop and trigger_rule not in cls._allowed_rules: + if fail_stop and trigger_rule not in cls._allowed_rules: raise cls() def __str__(self) -> str: diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index 80fb673cab844..aead7e2b2c11c 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -84,7 +84,7 @@ def _run_task(self, ti: TaskInstance) -> bool: key = ti.key try: params = self.tasks_params.pop(ti.key, {}) - ti.run(job_id=ti.job_id, **params) + ti.run(**params) self.success(key) return True except Exception as e: diff --git a/airflow/executors/executor_constants.py b/airflow/executors/executor_constants.py index 4e4923beb477b..65d814f28ac8f 100644 --- a/airflow/executors/executor_constants.py +++ b/airflow/executors/executor_constants.py @@ -24,7 +24,6 @@ class ConnectorSource(Enum): """Enum of supported executor import sources.""" CORE = "core" - PLUGIN = "plugin" CUSTOM_PATH = "custom path" diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py index 4a940793df27f..7fc0bd63e9802 100644 --- a/airflow/executors/executor_loader.py +++ b/airflow/executors/executor_loader.py @@ -21,7 +21,6 @@ import functools import logging import os -from contextlib import suppress from typing import TYPE_CHECKING from airflow.api_internal.internal_api_call import InternalApiConfig @@ -95,7 +94,7 @@ def _get_executor_names(cls) -> list[ExecutorName]: # paths won't be provided by the user in that case. if core_executor_module := cls.executors.get(name): executor_names.append(ExecutorName(alias=name, module_path=core_executor_module)) - # Only a module path or plugin name was provided + # A module path was provided else: executor_names.append(ExecutorName(alias=None, module_path=name)) # An alias was provided with the module path @@ -105,12 +104,12 @@ def _get_executor_names(cls) -> list[ExecutorName]: # (e.g. my_local_exec_alias:LocalExecutor). Allowing this makes things unnecessarily # complicated. Multiple Executors of the same type will be supported by a future multitenancy # AIP. - # The module component should always be a module or plugin path. + # The module component should always be a module path. module_path = split_name[1] if not module_path or module_path in CORE_EXECUTOR_NAMES or "." not in module_path: raise AirflowConfigException( "Incorrectly formatted executor configuration. Second portion of an executor " - f"configuration must be a module path or plugin but received: {module_path}" + f"configuration must be a module path but received: {module_path}" ) else: executor_names.append(ExecutorName(alias=split_name[0], module_path=split_name[1])) @@ -118,7 +117,7 @@ def _get_executor_names(cls) -> list[ExecutorName]: raise AirflowConfigException(f"Incorrectly formatted executor configuration: {name}") # As of now, we do not allow duplicate executors. 
- # Add all module paths/plugin names to a set, since the actual code is what is unique + # Add all module paths to a set, since the actual code is what is unique unique_modules = set([exec_name.module_path for exec_name in executor_names]) if len(unique_modules) < len(executor_names): msg = ( @@ -217,7 +216,6 @@ def load_executor(cls, executor_name: ExecutorName | str | None) -> BaseExecutor This supports the following formats: * by executor name for core executor - * by ``{plugin_name}.{class_name}`` for executor from plugins * by import path * by class name of the Executor * by ExecutorName object specification @@ -272,7 +270,7 @@ def import_executor_cls( Supports the same formats as ExecutorLoader.load_executor. - :param executor_name: Name of core executor or module path to provider provided as a plugin. + :param executor_name: Name of core executor or module path to executor. :param validate: Whether or not to validate the executor before returning :return: executor class via executor_name and executor import source @@ -284,17 +282,6 @@ def _import_and_validate(path: str) -> type[BaseExecutor]: cls.validate_database_executor_compatibility(executor) return executor - if executor_name.connector_source == ConnectorSource.PLUGIN: - with suppress(ImportError, AttributeError): - # Load plugins here for executors as at that time the plugins might not have been - # initialized yet - from airflow import plugins_manager - - plugins_manager.integrate_executor_plugins() - return ( - _import_and_validate(f"airflow.executors.{executor_name.module_path}"), - ConnectorSource.PLUGIN, - ) return _import_and_validate(executor_name.module_path), executor_name.connector_source @classmethod diff --git a/airflow/executors/executor_utils.py b/airflow/executors/executor_utils.py index 8b67e96e4e538..016e01d8d0c3f 100644 --- a/airflow/executors/executor_utils.py +++ b/airflow/executors/executor_utils.py @@ -31,17 +31,8 @@ def __init__(self, module_path, alias=None): def set_connector_source(self): if self.alias in CORE_EXECUTOR_NAMES: self.connector_source = ConnectorSource.CORE - # If there is only one dot, then this is likely a plugin. This is the best we can do - # to determine. - elif self.module_path.count(".") == 1: - self.log.debug( - "The executor name looks like the plugin path (executor_name=%s) due to having " - "just two period delimited parts. Treating executor as a plugin", - self.module_path, - ) - self.connector_source = ConnectorSource.PLUGIN - # Executor must be a module else: + # Executor must be a module self.connector_source = ConnectorSource.CUSTOM_PATH def __repr__(self): diff --git a/airflow/hooks/base.py b/airflow/hooks/base.py index e82c838c8c331..8f95d7bfe1713 100644 --- a/airflow/hooks/base.py +++ b/airflow/hooks/base.py @@ -67,15 +67,16 @@ def get_connection(cls, conn_id: str) -> Connection: return conn @classmethod - def get_hook(cls, conn_id: str) -> BaseHook: + def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> BaseHook: """ Return default hook for this connection id. 
:param conn_id: connection id + :param hook_params: hook parameters :return: default hook for this connection """ connection = cls.get_connection(conn_id) - return connection.get_hook() + return connection.get_hook(hook_params=hook_params) def get_conn(self) -> Any: """Return connection for the hook.""" diff --git a/airflow/jobs/local_task_job_runner.py b/airflow/jobs/local_task_job_runner.py index cdc3c1b624694..c900c88674e74 100644 --- a/airflow/jobs/local_task_job_runner.py +++ b/airflow/jobs/local_task_job_runner.py @@ -115,9 +115,9 @@ def __init__( self._overtime = 0.0 def _execute(self) -> int | None: - from airflow.task.task_runner import get_task_runner + from airflow.task.standard_task_runner import StandardTaskRunner - self.task_runner = get_task_runner(self) + self.task_runner = StandardTaskRunner(self) # Print a marker post execution for internals of post task processing self.log.info("::group::Pre task execution logs") @@ -159,7 +159,6 @@ def sigusr2_debug_handler(signum, frame): wait_for_past_depends_before_skipping=self.wait_for_past_depends_before_skipping, ignore_task_deps=self.ignore_task_deps, ignore_ti_state=self.ignore_ti_state, - job_id=str(self.job.id), pool=self.pool, external_executor_id=self.external_executor_id, ): @@ -319,6 +318,8 @@ def heartbeat_callback(self, session: Session = NEW_SESSION) -> None: "Recorded pid %s does not match the current pid %s", recorded_pid, current_pid ) raise AirflowException("PID of job runner does not match") + ti.update_heartbeat() + elif self.task_runner.return_code() is None and hasattr(self.task_runner, "process"): self._overtime = (timezone.utcnow() - (ti.end_date or timezone.utcnow())).total_seconds() if ti.state == TaskInstanceState.SKIPPED: diff --git a/airflow/jobs/scheduler_job_runner.py b/airflow/jobs/scheduler_job_runner.py index a052bf700db7d..39e4e35087bc4 100644 --- a/airflow/jobs/scheduler_job_runner.py +++ b/airflow/jobs/scheduler_job_runner.py @@ -19,6 +19,7 @@ import itertools import multiprocessing +import operator import os import signal import sys @@ -29,7 +30,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Iterator -from sqlalchemy import and_, delete, exists, func, not_, or_, select, text, update +from sqlalchemy import and_, delete, exists, func, not_, select, text, update from sqlalchemy.exc import OperationalError from sqlalchemy.orm import lazyload, load_only, make_transient, selectinload from sqlalchemy.sql import expression @@ -55,6 +56,7 @@ from airflow.models.dag import DAG, DagModel from airflow.models.dagbag import DagBag from airflow.models.dagrun import DagRun +from airflow.models.dagwarning import DagWarning, DagWarningType from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance from airflow.stats import Stats @@ -775,7 +777,7 @@ def process_executor_events( "TaskInstance Finished: dag_id=%s, task_id=%s, run_id=%s, map_index=%s, " "run_start_date=%s, run_end_date=%s, " "run_duration=%s, state=%s, executor=%s, executor_state=%s, try_number=%s, max_tries=%s, " - "job_id=%s, pool=%s, queue=%s, priority_weight=%d, operator=%s, queued_dttm=%s, " + "pool=%s, queue=%s, priority_weight=%d, operator=%s, queued_dttm=%s, " "queued_by_job_id=%s, pid=%s" ) cls.logger().info( @@ -792,7 +794,6 @@ def process_executor_events( state, try_number, ti.max_tries, - ti.job_id, ti.pool, ti.queue, ti.priority_weight, @@ -819,7 +820,6 @@ def process_executor_events( 
span.set_attribute("operator", str(ti.operator)) span.set_attribute("try_number", ti.try_number) span.set_attribute("executor_state", state) - span.set_attribute("job_id", ti.job_id) span.set_attribute("pool", ti.pool) span.set_attribute("queue", ti.queue) span.set_attribute("priority_weight", ti.priority_weight) @@ -827,9 +827,12 @@ def process_executor_events( span.set_attribute("queued_by_job_id", ti.queued_by_job_id) span.set_attribute("pid", ti.pid) if span.is_recording(): - span.add_event(name="queued", timestamp=datetime_to_nano(ti.queued_dttm)) - span.add_event(name="started", timestamp=datetime_to_nano(ti.start_date)) - span.add_event(name="ended", timestamp=datetime_to_nano(ti.end_date)) + if ti.queued_dttm: + span.add_event(name="queued", timestamp=datetime_to_nano(ti.queued_dttm)) + if ti.start_date: + span.add_event(name="started", timestamp=datetime_to_nano(ti.start_date)) + if ti.end_date: + span.add_event(name="ended", timestamp=datetime_to_nano(ti.end_date)) if conf.has_option("traces", "otel_task_log_event") and conf.getboolean( "traces", "otel_task_log_event" ): @@ -1078,7 +1081,7 @@ def _run_scheduler_loop(self) -> None: timers.call_regular_interval( conf.getfloat("scheduler", "parsing_cleanup_interval"), - self._orphan_unreferenced_assets, + self._update_asset_orphanage, ) if self._standalone_dag_processor: @@ -1272,15 +1275,15 @@ def _do_scheduling(self, session: Session) -> int: @retry_db_transaction def _create_dagruns_for_dags(self, guard: CommitProhibitorGuard, session: Session) -> None: """Find Dag Models needing DagRuns and Create Dag Runs with retries in case of OperationalError.""" - query, dataset_triggered_dag_info = DagModel.dags_needing_dagruns(session) + query, asset_triggered_dag_info = DagModel.dags_needing_dagruns(session) all_dags_needing_dag_runs = set(query.all()) - dataset_triggered_dags = [ - dag for dag in all_dags_needing_dag_runs if dag.dag_id in dataset_triggered_dag_info + asset_triggered_dags = [ + dag for dag in all_dags_needing_dag_runs if dag.dag_id in asset_triggered_dag_info ] - non_dataset_dags = all_dags_needing_dag_runs.difference(dataset_triggered_dags) - self._create_dag_runs(non_dataset_dags, session) - if dataset_triggered_dags: - self._create_dag_runs_asset_triggered(dataset_triggered_dags, dataset_triggered_dag_info, session) + non_asset_dags = all_dags_needing_dag_runs.difference(asset_triggered_dags) + self._create_dag_runs(non_asset_dags, session) + if asset_triggered_dags: + self._create_dag_runs_asset_triggered(asset_triggered_dags, asset_triggered_dag_info, session) # commit the session - Release the write lock on DagModel table. 
guard.commit() @@ -1391,7 +1394,7 @@ def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) - def _create_dag_runs_asset_triggered( self, dag_models: Collection[DagModel], - dataset_triggered_dag_info: dict[str, tuple[datetime, datetime]], + asset_triggered_dag_info: dict[str, tuple[datetime, datetime]], session: Session, ) -> None: """For DAGs that are triggered by assets, create dag runs.""" @@ -1401,7 +1404,7 @@ def _create_dag_runs_asset_triggered( # duplicate dag runs exec_dates = { dag_id: timezone.coerce_datetime(last_time) - for dag_id, (_, last_time) in dataset_triggered_dag_info.items() + for dag_id, (_, last_time) in asset_triggered_dag_info.items() } existing_dagruns: set[tuple[str, timezone.DateTime]] = set( session.execute( @@ -1419,7 +1422,7 @@ def _create_dag_runs_asset_triggered( if not isinstance(dag.timetable, AssetTriggeredTimetable): self.log.error( - "DAG '%s' was asset-scheduled, but didn't have a AssetTriggeredTimetable!", + "DAG '%s' was asset-scheduled, but didn't have an AssetTriggeredTimetable!", dag_model.dag_id, ) continue @@ -1441,7 +1444,7 @@ def _create_dag_runs_asset_triggered( .where( DagRun.dag_id == dag.dag_id, DagRun.execution_date < exec_date, - DagRun.run_type == DagRunType.DATASET_TRIGGERED, + DagRun.run_type == DagRunType.ASSET_TRIGGERED, ) .order_by(DagRun.execution_date.desc()) .limit(1) @@ -1457,14 +1460,14 @@ def _create_dag_runs_asset_triggered( select(AssetEvent) .join( DagScheduleAssetReference, - AssetEvent.dataset_id == DagScheduleAssetReference.dataset_id, + AssetEvent.asset_id == DagScheduleAssetReference.asset_id, ) .where(*asset_event_filters) ).all() data_interval = dag.timetable.data_interval_for_events(exec_date, asset_events) run_id = dag.timetable.generate_run_id( - run_type=DagRunType.DATASET_TRIGGERED, + run_type=DagRunType.ASSET_TRIGGERED, logical_date=exec_date, data_interval=data_interval, session=session, @@ -1473,7 +1476,7 @@ def _create_dag_runs_asset_triggered( dag_run = dag.create_dagrun( run_id=run_id, - run_type=DagRunType.DATASET_TRIGGERED, + run_type=DagRunType.ASSET_TRIGGERED, execution_date=exec_date, data_interval=data_interval, state=DagRunState.QUEUED, @@ -1481,10 +1484,10 @@ def _create_dag_runs_asset_triggered( session=session, dag_hash=dag_hash, creating_job_id=self.job.id, - triggered_by=DagRunTriggeredByType.DATASET, + triggered_by=DagRunTriggeredByType.ASSET, ) Stats.incr("asset.triggered_dagruns") - dag_run.consumed_dataset_events.extend(asset_events) + dag_run.consumed_asset_events.extend(asset_events) session.execute( delete(AssetDagRunQueue).where(AssetDagRunQueue.target_dag_id == dag_run.dag_id) ) @@ -1972,22 +1975,20 @@ def _find_and_purge_zombies(self) -> None: self._purge_zombies(zombies, session=session) def _find_zombies(self, *, session: Session) -> list[tuple[TI, str, str]]: - from airflow.jobs.job import Job - self.log.debug("Finding 'running' jobs without a recent heartbeat") limit_dttm = timezone.utcnow() - timedelta(seconds=self._zombie_threshold_secs) zombies = session.execute( select(TI, DM.fileloc, DM.processor_subdir) .with_hint(TI, "USE INDEX (ti_state)", dialect_name="mysql") - .join(Job, TI.job_id == Job.id) .join(DM, TI.dag_id == DM.dag_id) - .where(TI.state == TaskInstanceState.RUNNING) - .where(or_(Job.state != JobState.RUNNING, Job.latest_heartbeat < limit_dttm)) - .where(Job.job_type == "LocalTaskJob") + .where( + TI.state.in_((TaskInstanceState.RUNNING, TaskInstanceState.RESTARTING)), + TI.last_heartbeat_at < limit_dttm, + ) .where(TI.queued_by_job_id == 
self.job.id) ).all() if zombies: - self.log.warning("Failing (%s) jobs without heartbeat after %s", len(zombies), limit_dttm) + self.log.warning("Failing %s TIs without heartbeat after %s", len(zombies), limit_dttm) return zombies def _purge_zombies(self, zombies: list[tuple[TI, str, str]], *, session: Session) -> None: @@ -2068,44 +2069,106 @@ def _cleanup_stale_dags(self, session: Session = NEW_SESSION) -> None: SerializedDagModel.remove_dag(dag_id=dag.dag_id, session=session) session.flush() - def _get_orphaning_identifier(self, asset: AssetModel) -> tuple[str, str]: - self.log.info("Orphaning unreferenced %s", asset) - return asset.name, asset.uri - @provide_session - def _orphan_unreferenced_assets(self, session: Session = NEW_SESSION) -> None: + def _update_asset_orphanage(self, session: Session = NEW_SESSION) -> None: """ - Detect orphaned assets and remove their active entry. + Check assets for orphaning and update their active entries. - An orphaned asset is no longer referenced in any DAG schedule parameters or task outlets. + An orphaned asset is no longer referenced in any DAG schedule parameters + or task outlets. Active assets (non-orphaned) have entries in AssetActive + and must have unique names and URIs. """ - orphaned_asset_query = session.scalars( - select(AssetModel) - .join( - DagScheduleAssetReference, - isouter=True, - ) - .join( - TaskOutletAssetReference, - isouter=True, - ) + # Group assets into orphaned=True and orphaned=False groups. + orphaned = ( + (func.count(DagScheduleAssetReference.dag_id) + func.count(TaskOutletAssetReference.dag_id)) == 0 + ).label("orphaned") + asset_reference_query = session.execute( + select(orphaned, AssetModel) + .outerjoin(DagScheduleAssetReference) + .outerjoin(TaskOutletAssetReference) .group_by(AssetModel.id) - .where(AssetModel.active.has()) - .having( - and_( - func.count(DagScheduleAssetReference.dag_id) == 0, - func.count(TaskOutletAssetReference.dag_id) == 0, + .order_by(orphaned) + ) + asset_orphanation: dict[bool, Collection[AssetModel]] = { + orphaned: [asset for _, asset in group] + for orphaned, group in itertools.groupby(asset_reference_query, key=operator.itemgetter(0)) + } + self._orphan_unreferenced_assets(asset_orphanation.get(True, ()), session=session) + self._activate_referenced_assets(asset_orphanation.get(False, ()), session=session) + + @staticmethod + def _orphan_unreferenced_assets(assets: Collection[AssetModel], *, session: Session) -> None: + if assets: + session.execute( + delete(AssetActive).where( + tuple_in_condition((AssetActive.name, AssetActive.uri), ((a.name, a.uri) for a in assets)) + ) + ) + Stats.gauge("asset.orphaned", len(assets)) + + @staticmethod + def _activate_referenced_assets(assets: Collection[AssetModel], *, session: Session) -> None: + if not assets: + return + + active_assets = set( + session.execute( + select(AssetActive.name, AssetActive.uri).where( + tuple_in_condition((AssetActive.name, AssetActive.uri), ((a.name, a.uri) for a in assets)) ) ) ) - orphaning_identifiers = [self._get_orphaning_identifier(asset) for asset in orphaned_asset_query] + active_name_to_uri: dict[str, str] = {name: uri for name, uri in active_assets} + active_uri_to_name: dict[str, str] = {uri: name for name, uri in active_assets} + + def _generate_dag_warnings(offending: AssetModel, attr: str, value: str) -> Iterator[DagWarning]: + for ref in itertools.chain(offending.consuming_dags, offending.producing_tasks): + yield DagWarning( + dag_id=ref.dag_id, + error_type=DagWarningType.ASSET_CONFLICT, + 
message=f"Cannot activate asset {offending}; {attr} is already associated to {value!r}", + ) + + def _activate_assets_generate_warnings() -> Iterator[DagWarning]: + incoming_name_to_uri: dict[str, str] = {} + incoming_uri_to_name: dict[str, str] = {} + for asset in assets: + if (asset.name, asset.uri) in active_assets: + continue + existing_uri = active_name_to_uri.get(asset.name) or incoming_name_to_uri.get(asset.name) + if existing_uri is not None and existing_uri != asset.uri: + yield from _generate_dag_warnings(asset, "name", existing_uri) + continue + existing_name = active_uri_to_name.get(asset.uri) or incoming_uri_to_name.get(asset.uri) + if existing_name is not None and existing_name != asset.name: + yield from _generate_dag_warnings(asset, "uri", existing_name) + continue + incoming_name_to_uri[asset.name] = asset.uri + incoming_uri_to_name[asset.uri] = asset.name + session.add(AssetActive.for_asset(asset)) + + warnings_to_have = {w.dag_id: w for w in _activate_assets_generate_warnings()} session.execute( - delete(AssetActive).where( - tuple_in_condition((AssetActive.name, AssetActive.uri), orphaning_identifiers) + delete(DagWarning).where( + DagWarning.warning_type == DagWarningType.ASSET_CONFLICT, + DagWarning.dag_id.not_in(warnings_to_have), ) ) - Stats.gauge("asset.orphaned", len(orphaning_identifiers)) + existing_warned_dag_ids: set[str] = set( + session.scalars( + select(DagWarning.dag_id).where( + DagWarning.warning_type == DagWarningType.ASSET_CONFLICT, + DagWarning.dag_id.not_in(warnings_to_have), + ) + ) + ) + for dag_id, warning in warnings_to_have.items(): + if dag_id in existing_warned_dag_ids: + session.merge(warning) + continue + session.add(warning) + existing_warned_dag_ids.add(warning.dag_id) def _executor_to_tis(self, tis: list[TaskInstance]) -> dict[BaseExecutor, list[TaskInstance]]: """Organize TIs into lists per their respective executor.""" diff --git a/airflow/listeners/spec/asset.py b/airflow/listeners/spec/asset.py index 78b14c8b10aeb..dba9ac700e415 100644 --- a/airflow/listeners/spec/asset.py +++ b/airflow/listeners/spec/asset.py @@ -33,8 +33,8 @@ def on_asset_created(asset: Asset): @hookspec -def on_asset_alias_created(dataset_alias: AssetAlias): - """Execute when a new dataset alias is created.""" +def on_asset_alias_created(asset_alias: AssetAlias): + """Execute when a new asset alias is created.""" @hookspec diff --git a/airflow/migrations/env.py b/airflow/migrations/env.py index 4be8ac32ca252..9bf9dfec0054b 100644 --- a/airflow/migrations/env.py +++ b/airflow/migrations/env.py @@ -19,6 +19,7 @@ import contextlib import sys +from logging import getLogger from logging.config import fileConfig from alembic import context @@ -48,7 +49,8 @@ def include_object(_, name, type_, *args): # Interpret the config file for Python logging. # This line sets up loggers basically. -fileConfig(config.config_file_name, disable_existing_loggers=False) +if not getLogger().handlers: + fileConfig(config.config_file_name, disable_existing_loggers=False) # add your model's MetaData object here # for 'autogenerate' support diff --git a/airflow/migrations/script.py.mako b/airflow/migrations/script.py.mako index 664b20b60bf8b..81181d9d631b8 100644 --- a/airflow/migrations/script.py.mako +++ b/airflow/migrations/script.py.mako @@ -29,8 +29,8 @@ from alembic import op ${imports if imports else ""} # revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} +revision = "${up_revision}" +down_revision = "${down_revision}" branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} diff --git a/airflow/migrations/versions/0026_2_10_0_dag_schedule_dataset_alias_reference.py b/airflow/migrations/versions/0026_2_10_0_dag_schedule_dataset_alias_reference.py index a577a3eb78138..f4c11a7b006a8 100644 --- a/airflow/migrations/versions/0026_2_10_0_dag_schedule_dataset_alias_reference.py +++ b/airflow/migrations/versions/0026_2_10_0_dag_schedule_dataset_alias_reference.py @@ -45,7 +45,7 @@ def upgrade(): """Add dag_schedule_dataset_alias_reference table.""" op.create_table( "dag_schedule_dataset_alias_reference", - sa.Column("alias_id", sa.Integer(), nullable=False), + sa.Column("alias_id", sa.Integer(), primary_key=True, nullable=False), sa.Column("dag_id", StringID(), primary_key=True, nullable=False), sa.Column("created_at", airflow.utils.sqlalchemy.UtcDateTime(timezone=True), nullable=False), sa.Column("updated_at", airflow.utils.sqlalchemy.UtcDateTime(timezone=True), nullable=False), @@ -58,7 +58,7 @@ def upgrade(): sa.ForeignKeyConstraint( columns=("dag_id",), refcolumns=["dag.dag_id"], - name="dsdar_dag_id_fkey", + name="dsdar_dag_fkey", ondelete="CASCADE", ), sa.PrimaryKeyConstraint("alias_id", "dag_id", name="dsdar_pkey"), diff --git a/airflow/migrations/versions/0027_2_10_3_fix_dag_schedule_dataset_alias_reference_naming.py b/airflow/migrations/versions/0027_2_10_3_fix_dag_schedule_dataset_alias_reference_naming.py new file mode 100644 index 0000000000000..8fb02d3dcf193 --- /dev/null +++ b/airflow/migrations/versions/0027_2_10_3_fix_dag_schedule_dataset_alias_reference_naming.py @@ -0,0 +1,129 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Rename dag_schedule_dataset_alias_reference constraint names. + +Revision ID: 5f2621c13b39 +Revises: 22ed7efa9da2 +Create Date: 2024-10-25 04:03:33.002701 + +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from alembic import op +from sqlalchemy import inspect + +# revision identifiers, used by Alembic. 
+revision = "5f2621c13b39" +down_revision = "22ed7efa9da2" +branch_labels = None +depends_on = None +airflow_version = "2.10.3" + +if TYPE_CHECKING: + from alembic.operations.base import BatchOperations + from sqlalchemy.sql.elements import conv + + +def _rename_fk_constraint( + *, + batch_op: BatchOperations, + original_name: str | conv, + new_name: str | conv, + referent_table: str, + local_cols: list[str], + remote_cols: list[str], + ondelete: str, +) -> None: + batch_op.drop_constraint(original_name, type_="foreignkey") + batch_op.create_foreign_key( + constraint_name=new_name, + referent_table=referent_table, + local_cols=local_cols, + remote_cols=remote_cols, + ondelete=ondelete, + ) + + +def upgrade(): + """Rename dag_schedule_dataset_alias_reference constraint.""" + with op.batch_alter_table("dag_schedule_dataset_alias_reference", schema=None) as batch_op: + bind = op.get_context().bind + insp = inspect(bind) + fk_constraints = [fk["name"] for fk in insp.get_foreign_keys("dag_schedule_dataset_alias_reference")] + + # "dsdar_dataset_alias_fkey" was the constraint name defined in the model while "dsdar_dataset_fkey" is the one + # defined in the previous migration. + # Rename this constraint name if user is using the name "dsdar_dataset_fkey". + if "dsdar_dataset_fkey" in fk_constraints: + _rename_fk_constraint( + batch_op=batch_op, + original_name="dsdar_dataset_fkey", + new_name="dsdar_dataset_alias_fkey", + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + # "dsdar_dag_fkey" was the constraint name defined in the model while "dsdar_dag_id_fkey" is the one + # defined in the previous migration. + # Rename this constraint name if user is using the name "dsdar_dag_fkey". + if "dsdar_dag_fkey" in fk_constraints: + _rename_fk_constraint( + batch_op=batch_op, + original_name="dsdar_dag_fkey", + new_name="dsdar_dag_id_fkey", + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + +def downgrade(): + """Undo dag_schedule_dataset_alias_reference constraint rename.""" + with op.batch_alter_table("dag_schedule_dataset_alias_reference", schema=None) as batch_op: + bind = op.get_context().bind + insp = inspect(bind) + fk_constraints = [fk["name"] for fk in insp.get_foreign_keys("dag_schedule_dataset_alias_reference")] + if "dsdar_dataset_alias_fkey" in fk_constraints: + _rename_fk_constraint( + batch_op=batch_op, + original_name="dsdar_dataset_alias_fkey", + new_name="dsdar_dataset_fkey", + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + if "dsdar_dag_id_fkey" in fk_constraints: + _rename_fk_constraint( + batch_op=batch_op, + original_name="dsdar_dag_id_fkey", + new_name="dsdar_dag_fkey", + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) diff --git a/airflow/migrations/versions/0027_3_0_0_drop_ab_user_id_foreign_key.py b/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py similarity index 97% rename from airflow/migrations/versions/0027_3_0_0_drop_ab_user_id_foreign_key.py rename to airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py index 5c19559488523..f88aaa014bb3a 100644 --- a/airflow/migrations/versions/0027_3_0_0_drop_ab_user_id_foreign_key.py +++ b/airflow/migrations/versions/0028_3_0_0_drop_ab_user_id_foreign_key.py @@ -20,7 +20,7 @@ Drop ab_user.id foreign key. 
Revision ID: 044f740568ec -Revises: 22ed7efa9da2 +Revises: 5f2621c13b39 Create Date: 2024-08-02 07:18:29.830521 """ @@ -31,7 +31,7 @@ # revision identifiers, used by Alembic. revision = "044f740568ec" -down_revision = "22ed7efa9da2" +down_revision = "5f2621c13b39" branch_labels = None depends_on = None airflow_version = "3.0.0" diff --git a/airflow/migrations/versions/0028_3_0_0_remove_is_subdag.py b/airflow/migrations/versions/0029_3_0_0_remove_is_subdag.py similarity index 100% rename from airflow/migrations/versions/0028_3_0_0_remove_is_subdag.py rename to airflow/migrations/versions/0029_3_0_0_remove_is_subdag.py diff --git a/airflow/migrations/versions/0029_3_0_0_rename_schedule_interval_to_timetable_.py b/airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py similarity index 100% rename from airflow/migrations/versions/0029_3_0_0_rename_schedule_interval_to_timetable_.py rename to airflow/migrations/versions/0030_3_0_0_rename_schedule_interval_to_timetable_.py diff --git a/airflow/migrations/versions/0030_3_0_0_add_triggered_by_field_to_dagrun.py b/airflow/migrations/versions/0031_3_0_0_add_triggered_by_field_to_dagrun.py similarity index 100% rename from airflow/migrations/versions/0030_3_0_0_add_triggered_by_field_to_dagrun.py rename to airflow/migrations/versions/0031_3_0_0_add_triggered_by_field_to_dagrun.py diff --git a/airflow/migrations/versions/0031_3_0_0_drop_execution_date_unique.py b/airflow/migrations/versions/0032_3_0_0_drop_execution_date_unique.py similarity index 100% rename from airflow/migrations/versions/0031_3_0_0_drop_execution_date_unique.py rename to airflow/migrations/versions/0032_3_0_0_drop_execution_date_unique.py diff --git a/airflow/migrations/versions/0032_3_0_0_add_tables_for_backfill.py b/airflow/migrations/versions/0033_3_0_0_add_tables_for_backfill.py similarity index 100% rename from airflow/migrations/versions/0032_3_0_0_add_tables_for_backfill.py rename to airflow/migrations/versions/0033_3_0_0_add_tables_for_backfill.py diff --git a/airflow/migrations/versions/0033_3_0_0_remove_redundant_index.py b/airflow/migrations/versions/0034_3_0_0_remove_redundant_index.py similarity index 100% rename from airflow/migrations/versions/0033_3_0_0_remove_redundant_index.py rename to airflow/migrations/versions/0034_3_0_0_remove_redundant_index.py diff --git a/airflow/migrations/versions/0034_3_0_0_update_user_id_type.py b/airflow/migrations/versions/0035_3_0_0_update_user_id_type.py similarity index 100% rename from airflow/migrations/versions/0034_3_0_0_update_user_id_type.py rename to airflow/migrations/versions/0035_3_0_0_update_user_id_type.py diff --git a/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py b/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py similarity index 96% rename from airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py rename to airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py index 2460b6956cff6..353dcbf0f8fdd 100644 --- a/airflow/migrations/versions/0035_3_0_0_add_name_field_to_dataset_model.py +++ b/airflow/migrations/versions/0036_3_0_0_add_name_field_to_dataset_model.py @@ -61,7 +61,9 @@ def upgrade(): # Add 'name' column. Set it to nullable for now. 
with op.batch_alter_table("dataset", schema=None) as batch_op: batch_op.add_column(sa.Column("name", _STRING_COLUMN_TYPE)) - batch_op.add_column(sa.Column("group", _STRING_COLUMN_TYPE, default=str, nullable=False)) + batch_op.add_column( + sa.Column("group", _STRING_COLUMN_TYPE, default=str, server_default="", nullable=False) + ) # Fill name from uri column. with Session(bind=op.get_bind()) as session: session.execute(sa.text("update dataset set name=uri")) diff --git a/airflow/migrations/versions/0036_3_0_0_add_backfill_to_dag_run_model.py b/airflow/migrations/versions/0037_3_0_0_add_backfill_to_dag_run_model.py similarity index 100% rename from airflow/migrations/versions/0036_3_0_0_add_backfill_to_dag_run_model.py rename to airflow/migrations/versions/0037_3_0_0_add_backfill_to_dag_run_model.py diff --git a/airflow/migrations/versions/0037_3_0_0_add_asset_active.py b/airflow/migrations/versions/0038_3_0_0_add_asset_active.py similarity index 100% rename from airflow/migrations/versions/0037_3_0_0_add_asset_active.py rename to airflow/migrations/versions/0038_3_0_0_add_asset_active.py diff --git a/airflow/migrations/versions/0038_3_0_0_tweak_assetaliasmodel_to_match_asset.py b/airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py similarity index 100% rename from airflow/migrations/versions/0038_3_0_0_tweak_assetaliasmodel_to_match_asset.py rename to airflow/migrations/versions/0039_3_0_0_tweak_assetaliasmodel_to_match_asset.py diff --git a/airflow/migrations/versions/0040_3_0_0_add_exception_reason_and_logical_date_.py b/airflow/migrations/versions/0040_3_0_0_add_exception_reason_and_logical_date_.py new file mode 100644 index 0000000000000..c4f96fb0ebf35 --- /dev/null +++ b/airflow/migrations/versions/0040_3_0_0_add_exception_reason_and_logical_date_.py @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add exception_reason and logical_date to BackfillDagRun. 
+ +Revision ID: 3a8972ecb8f9 +Revises: fb2d4922cd79 +Create Date: 2024-10-18 16:24:38.932005 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.utils.sqlalchemy import UtcDateTime + +revision = "3a8972ecb8f9" +down_revision = "fb2d4922cd79" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + """Apply Add exception_reason and logical_date to BackfillDagRun.""" + with op.batch_alter_table("backfill", schema=None) as batch_op: + batch_op.add_column(sa.Column("reprocess_behavior", sa.String(length=250), nullable=True)) + with op.batch_alter_table("backfill_dag_run", schema=None) as batch_op: + batch_op.add_column(sa.Column("exception_reason", sa.String(length=250), nullable=True)) + batch_op.add_column(sa.Column("logical_date", UtcDateTime(timezone=True), nullable=False)) + + +def downgrade(): + """Unapply Add exception_reason and logical_date to BackfillDagRun.""" + with op.batch_alter_table("backfill", schema=None) as batch_op: + batch_op.drop_column("reprocess_behavior") + with op.batch_alter_table("backfill_dag_run", schema=None) as batch_op: + batch_op.drop_column("logical_date") + batch_op.drop_column("exception_reason") diff --git a/airflow/migrations/versions/0041_3_0_0_rename_dataset_as_asset.py b/airflow/migrations/versions/0041_3_0_0_rename_dataset_as_asset.py new file mode 100644 index 0000000000000..03836503efe62 --- /dev/null +++ b/airflow/migrations/versions/0041_3_0_0_rename_dataset_as_asset.py @@ -0,0 +1,694 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Rename dataset as asset. + +Revision ID: 05234396c6fc +Revises: 3a8972ecb8f9 +Create Date: 2024-10-02 08:10:01.697128 +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import sqlalchemy as sa +import sqlalchemy_jsonfield +from alembic import op + +from airflow.settings import json + +# revision identifiers, used by Alembic. 
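# --- Editorial note (illustration only, not part of this PR) -----------------
# The column additions above (the "group" column in migration 0036 and the
# backfill columns in 0040) all go through Alembic's batch_alter_table(), which
# issues plain ALTER TABLE on Postgres/MySQL and falls back to copy-and-swap on
# SQLite. When the target table may already contain rows, a NOT NULL column
# also needs a server_default so existing rows get a value at DDL time; the
# Python-level default= only applies to new ORM inserts. A minimal sketch of
# the pattern on a hypothetical "example" table:
import sqlalchemy as sa
from alembic import op


def upgrade():
    with op.batch_alter_table("example", schema=None) as batch_op:
        batch_op.add_column(sa.Column("note", sa.String(length=250), nullable=True))
        batch_op.add_column(
            sa.Column("group", sa.String(length=250), default=str, server_default="", nullable=False)
        )


def downgrade():
    with op.batch_alter_table("example", schema=None) as batch_op:
        batch_op.drop_column("group")
        batch_op.drop_column("note")
# ------------------------------------------------------------------------------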
+revision = "05234396c6fc" +down_revision = "3a8972ecb8f9" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + +if TYPE_CHECKING: + from alembic.operations.base import BatchOperations + from sqlalchemy.sql.elements import conv + + +def _rename_index( + *, batch_op: BatchOperations, original_name: str, new_name: str, columns: list[str], unique: bool +) -> None: + batch_op.drop_index(original_name) + batch_op.create_index(new_name, columns, unique=unique) + + +def _rename_fk_constraint( + *, + batch_op: BatchOperations, + original_name: str | conv, + new_name: str | conv, + referent_table: str, + local_cols: list[str], + remote_cols: list[str], + ondelete: str, +) -> None: + batch_op.drop_constraint(original_name, type_="foreignkey") + batch_op.create_foreign_key( + constraint_name=new_name, + referent_table=referent_table, + local_cols=local_cols, + remote_cols=remote_cols, + ondelete=ondelete, + ) + + +def _rename_pk_constraint( + *, batch_op: BatchOperations, original_name: str, new_name: str, columns: list[str] +) -> None: + if batch_op.get_bind().dialect.name in ("postgresql", "mysql"): + batch_op.drop_constraint(original_name, type_="primary") + batch_op.create_primary_key(constraint_name=new_name, columns=columns) + + +# original table name to new table name +table_name_mappings = ( + ("dataset_alias_dataset", "asset_alias_asset"), + ("dataset_alias_dataset_event", "asset_alias_asset_event"), + ("dataset_alias", "asset_alias"), + ("dataset", "asset"), + ("dag_schedule_dataset_alias_reference", "dag_schedule_asset_alias_reference"), + ("dag_schedule_dataset_reference", "dag_schedule_asset_reference"), + ("task_outlet_dataset_reference", "task_outlet_asset_reference"), + ("dataset_dag_run_queue", "asset_dag_run_queue"), + ("dagrun_dataset_event", "dagrun_asset_event"), + ("dataset_event", "asset_event"), +) + + +def upgrade(): + """Rename dataset as asset.""" + # Rename tables + for original_name, new_name in table_name_mappings: + op.rename_table(original_name, new_name) + + with op.batch_alter_table("asset_active", schema=None) as batch_op: + batch_op.drop_constraint("asset_active_asset_name_uri_fkey", type_="foreignkey") + + with op.batch_alter_table("asset", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_name_uri_unique", + new_name="idx_asset_name_uri_unique", + columns=["name", "uri"], + unique=True, + ) + + with op.batch_alter_table("asset_active", schema=None) as batch_op: + batch_op.create_foreign_key( + constraint_name="asset_active_asset_name_uri_fkey", + referent_table="asset", + local_cols=["name", "uri"], + remote_cols=["name", "uri"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("asset_alias_asset", schema=None) as batch_op: + batch_op.alter_column("dataset_id", new_column_name="asset_id", type_=sa.Integer(), nullable=False) + + with op.batch_alter_table("asset_alias_asset", schema=None) as batch_op: + batch_op.drop_constraint(op.f("dataset_alias_dataset_alias_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_alias_dataset_alias_id", + new_name="idx_asset_alias_asset_alias_id", + columns=["alias_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="asset_alias_asset_alias_id_fkey", + referent_table="asset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint(op.f("dataset_alias_dataset_dataset_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + 
original_name="idx_dataset_alias_dataset_alias_dataset_id", + new_name="idx_asset_alias_asset_asset_id", + columns=["asset_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="asset_alias_asset_asset_id_fkey", + referent_table="asset", + local_cols=["asset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("asset_alias_asset_event", schema=None) as batch_op: + batch_op.drop_constraint(op.f("dataset_alias_dataset_event_alias_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_alias_dataset_event_alias_id", + new_name="idx_asset_alias_asset_event_alias_id", + columns=["alias_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("asset_alias_asset_event_alias_id_fkey"), + referent_table="asset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint(op.f("dataset_alias_dataset_event_event_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_alias_dataset_event_event_id", + new_name="idx_asset_alias_asset_event_event_id", + columns=["event_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("asset_alias_asset_event_event_id_fkey"), + referent_table="asset_event", + local_cols=["event_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dag_schedule_asset_alias_reference", schema=None) as batch_op: + batch_op.drop_constraint("dsdar_dataset_alias_fkey", type_="foreignkey") + if op.get_bind().dialect.name in ("postgresql", "mysql"): + batch_op.drop_constraint("dsdar_dag_id_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="dsdar_pkey", + new_name="dsaar_pkey", + columns=["alias_id", "dag_id"], + ) + _rename_index( + batch_op=batch_op, + original_name="idx_dag_schedule_dataset_alias_reference_dag_id", + new_name="idx_dag_schedule_asset_alias_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="dsaar_asset_alias_fkey", + referent_table="asset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="dsaar_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dag_schedule_asset_reference", schema=None) as batch_op: + batch_op.alter_column("dataset_id", new_column_name="asset_id", type_=sa.Integer(), nullable=False) + + with op.batch_alter_table("dag_schedule_asset_reference", schema=None) as batch_op: + batch_op.drop_constraint("dsdr_dag_id_fkey", type_="foreignkey") + if op.get_bind().dialect.name in ("postgresql", "mysql"): + batch_op.drop_constraint("dsdr_dataset_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="dsdr_pkey", + new_name="dsar_pkey", + columns=["asset_id", "dag_id"], + ) + _rename_index( + batch_op=batch_op, + original_name="idx_dag_schedule_dataset_reference_dag_id", + new_name="idx_dag_schedule_asset_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="dsar_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="dsar_asset_fkey", + referent_table="asset", + local_cols=["asset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) 
+ + with op.batch_alter_table("task_outlet_asset_reference", schema=None) as batch_op: + batch_op.alter_column("dataset_id", new_column_name="asset_id", type_=sa.Integer(), nullable=False) + + batch_op.drop_constraint("todr_dag_id_fkey", type_="foreignkey") + if op.get_bind().dialect.name in ("postgresql", "mysql"): + batch_op.drop_constraint("todr_dataset_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="todr_pkey", + new_name="toar_pkey", + columns=["asset_id", "dag_id", "task_id"], + ) + + _rename_index( + batch_op=batch_op, + original_name="idx_task_outlet_dataset_reference_dag_id", + new_name="idx_task_outlet_asset_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="toar_asset_fkey", + referent_table="asset", + local_cols=["asset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="toar_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("asset_dag_run_queue", schema=None) as batch_op: + batch_op.alter_column("dataset_id", new_column_name="asset_id", type_=sa.Integer(), nullable=False) + + batch_op.drop_constraint("ddrq_dag_fkey", type_="foreignkey") + if op.get_bind().dialect.name in ("postgresql", "mysql"): + batch_op.drop_constraint("ddrq_dataset_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="datasetdagrunqueue_pkey", + new_name="assetdagrunqueue_pkey", + columns=["asset_id", "target_dag_id"], + ) + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_dag_run_queue_target_dag_id", + new_name="idx_asset_dag_run_queue_target_dag_id", + columns=["target_dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="adrq_asset_fkey", + referent_table="asset", + local_cols=["asset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="adrq_dag_fkey", + referent_table="dag", + local_cols=["target_dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dagrun_asset_event", schema=None) as batch_op: + batch_op.drop_constraint("dagrun_dataset_event_event_id_fkey", type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dagrun_dataset_events_dag_run_id", + new_name="idx_dagrun_asset_events_dag_run_id", + columns=["dag_run_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="dagrun_asset_event_dag_run_id_fkey", + referent_table="dag_run", + local_cols=["dag_run_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint("dagrun_dataset_event_dag_run_id_fkey", type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dagrun_dataset_events_event_id", + new_name="idx_dagrun_asset_events_event_id", + columns=["event_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="dagrun_asset_event_event_id_fkey", + referent_table="asset_event", + local_cols=["event_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("asset_event", schema=None) as batch_op: + batch_op.alter_column("dataset_id", new_column_name="asset_id", type_=sa.Integer(), nullable=False) + + with op.batch_alter_table("asset_event", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_id_timestamp", + new_name="idx_asset_id_timestamp", + 
columns=["asset_id", "timestamp"], + unique=False, + ) + + with op.batch_alter_table("asset_alias", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_dataset_alias_name_unique", + new_name="idx_asset_alias_name_unique", + columns=["name"], + unique=True, + ) + + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.alter_column( + "dataset_expression", + new_column_name="asset_expression", + type_=sqlalchemy_jsonfield.JSONField(json=json), + ) + + +def downgrade(): + """Unapply Rename dataset as asset.""" + # Rename tables + for original_name, new_name in table_name_mappings: + op.rename_table(new_name, original_name) + + with op.batch_alter_table("asset_active", schema=None) as batch_op: + batch_op.drop_constraint("asset_active_asset_name_uri_fkey", type_="foreignkey") + + with op.batch_alter_table("dataset", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_asset_name_uri_unique", + new_name="idx_dataset_name_uri_unique", + columns=["name", "uri"], + unique=True, + ) + + with op.batch_alter_table("asset_active", schema=None) as batch_op: + batch_op.create_foreign_key( + constraint_name="asset_active_asset_name_uri_fkey", + referent_table="dataset", + local_cols=["name", "uri"], + remote_cols=["name", "uri"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dataset_alias_dataset", schema=None) as batch_op: + batch_op.alter_column("asset_id", new_column_name="dataset_id", type_=sa.Integer(), nullable=False) + + with op.batch_alter_table("dataset_alias_dataset", schema=None) as batch_op: + batch_op.drop_constraint(op.f("asset_alias_asset_alias_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_asset_alias_asset_alias_id", + new_name="idx_dataset_alias_dataset_alias_id", + columns=["alias_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("dataset_alias_dataset_alias_id_fkey"), + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint(op.f("asset_alias_asset_asset_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_asset_alias_asset_asset_id", + new_name="idx_dataset_alias_dataset_alias_dataset_id", + columns=["dataset_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("dataset_alias_dataset_dataset_id_fkey"), + referent_table="dataset", + local_cols=["dataset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dataset_alias_dataset_event", schema=None) as batch_op: + batch_op.drop_constraint(op.f("asset_alias_asset_event_alias_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_asset_alias_asset_event_alias_id", + new_name="idx_dataset_alias_dataset_event_alias_id", + columns=["alias_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("dataset_alias_dataset_event_alias_id_fkey"), + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint(op.f("asset_alias_asset_event_event_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_asset_alias_asset_event_event_id", + new_name="idx_dataset_alias_dataset_event_event_id", + columns=["event_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name=op.f("dataset_alias_dataset_event_event_id_fkey"), + 
referent_table="dataset_event", + local_cols=["event_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dag_schedule_dataset_alias_reference", schema=None) as batch_op: + batch_op.drop_constraint("dsaar_asset_alias_fkey", type_="foreignkey") + batch_op.drop_constraint("dsaar_dag_id_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="dsaar_pkey", + new_name="dsdar_pkey", + columns=["alias_id", "dag_id"], + ) + _rename_index( + batch_op=batch_op, + original_name="idx_dag_schedule_asset_alias_reference_dag_id", + new_name="idx_dag_schedule_dataset_alias_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="dsdar_dataset_alias_fkey", + referent_table="dataset_alias", + local_cols=["alias_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="dsdar_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dag_schedule_dataset_reference", schema=None) as batch_op: + batch_op.alter_column("asset_id", new_column_name="dataset_id", type_=sa.Integer(), nullable=False) + + batch_op.drop_constraint("dsar_dag_id_fkey", type_="foreignkey") + batch_op.drop_constraint("dsar_asset_fkey", type_="foreignkey") + + _rename_index( + batch_op=batch_op, + original_name="idx_dag_schedule_asset_reference_dag_id", + new_name="idx_dag_schedule_dataset_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + _rename_pk_constraint( + batch_op=batch_op, + original_name="dsar_pkey", + new_name="dsdr_pkey", + columns=["dataset_id", "dag_id"], + ) + + batch_op.create_foreign_key( + constraint_name="dsdr_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="dsdr_dataset_fkey", + referent_table="dataset", + local_cols=["dataset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("task_outlet_dataset_reference", schema=None) as batch_op: + batch_op.alter_column("asset_id", new_column_name="dataset_id", type_=sa.Integer(), nullable=False) + + batch_op.drop_constraint("toar_asset_fkey", type_="foreignkey") + batch_op.drop_constraint("toar_dag_id_fkey", type_="foreignkey") + + _rename_index( + batch_op=batch_op, + original_name="idx_task_outlet_asset_reference_dag_id", + new_name="idx_task_outlet_dataset_reference_dag_id", + columns=["dag_id"], + unique=False, + ) + _rename_pk_constraint( + batch_op=batch_op, + original_name="toar_pkey", + new_name="todr_pkey", + columns=["dataset_id", "dag_id", "task_id"], + ) + + batch_op.create_foreign_key( + constraint_name="todr_dataset_fkey", + referent_table="dataset", + local_cols=["dataset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="todr_dag_id_fkey", + referent_table="dag", + local_cols=["dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dataset_dag_run_queue", schema=None) as batch_op: + batch_op.alter_column("asset_id", new_column_name="dataset_id", type_=sa.Integer(), nullable=False) + + batch_op.drop_constraint("adrq_asset_fkey", type_="foreignkey") + batch_op.drop_constraint("adrq_dag_fkey", type_="foreignkey") + + _rename_pk_constraint( + batch_op=batch_op, + original_name="assetdagrunqueue_pkey", + new_name="datasetdagrunqueue_pkey", + columns=["dataset_id", 
"target_dag_id"], + ) + _rename_index( + batch_op=batch_op, + original_name="idx_asset_dag_run_queue_target_dag_id", + new_name="idx_dataset_dag_run_queue_target_dag_id", + columns=["target_dag_id"], + unique=False, + ) + + batch_op.create_foreign_key( + constraint_name="ddrq_dataset_fkey", + referent_table="dataset", + local_cols=["dataset_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + batch_op.create_foreign_key( + constraint_name="ddrq_dag_fkey", + referent_table="dag", + local_cols=["target_dag_id"], + remote_cols=["dag_id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dagrun_dataset_event", schema=None) as batch_op: + batch_op.drop_constraint(op.f("dagrun_asset_event_event_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dagrun_asset_events_event_id", + new_name="idx_dagrun_dataset_events_event_id", + columns=["event_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="dagrun_dataset_event_event_id_fkey", + referent_table="dataset_event", + local_cols=["event_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + batch_op.drop_constraint(op.f("dagrun_asset_event_dag_run_id_fkey"), type_="foreignkey") + _rename_index( + batch_op=batch_op, + original_name="idx_dagrun_asset_events_dag_run_id", + new_name="idx_dagrun_dataset_events_dag_run_id", + columns=["dag_run_id"], + unique=False, + ) + batch_op.create_foreign_key( + constraint_name="dagrun_dataset_event_dag_run_id_fkey", + referent_table="dag_run", + local_cols=["dag_run_id"], + remote_cols=["id"], + ondelete="CASCADE", + ) + + with op.batch_alter_table("dataset_event", schema=None) as batch_op: + batch_op.alter_column("asset_id", new_column_name="dataset_id", type_=sa.Integer(), nullable=False) + + with op.batch_alter_table("dataset_event", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_asset_id_timestamp", + new_name="idx_dataset_id_timestamp", + columns=["dataset_id", "timestamp"], + unique=False, + ) + + with op.batch_alter_table("dataset_alias", schema=None) as batch_op: + _rename_index( + batch_op=batch_op, + original_name="idx_asset_alias_name_unique", + new_name="idx_dataset_alias_name_unique", + columns=["name"], + unique=True, + ) + + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.alter_column( + "asset_expression", + new_column_name="dataset_expression", + type_=sqlalchemy_jsonfield.JSONField(json=json), + ) diff --git a/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py b/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py new file mode 100644 index 0000000000000..2abd2116f989a --- /dev/null +++ b/airflow/migrations/versions/0042_3_0_0_add_uuid_primary_key_to_task_instance_.py @@ -0,0 +1,281 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add UUID primary key to ``task_instance`` table. + +Revision ID: d59cbbef95eb +Revises: 05234396c6fc +Create Date: 2024-10-21 22:39:12.394079 +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op +from sqlalchemy import text +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "d59cbbef95eb" +down_revision = "05234396c6fc" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + +###### +# The following functions to create UUID v7 are solely for the purpose of this migration. +# This is done for production databases that do not support UUID v7 natively (Postgres, MySQL), +# and these functions are used instead of UUIDs from +# Python libraries like uuid6.uuid7() for performance reasons, since the task_instance table +# can be very large. +###### + +# PostgreSQL-specific UUID v7 function +pg_uuid7_fn = """ +DO $$ +DECLARE + pgcrypto_installed BOOLEAN; +BEGIN + -- Check if pgcrypto is already installed + pgcrypto_installed := EXISTS (SELECT 1 FROM pg_extension WHERE extname = 'pgcrypto'); + + -- Attempt to create pgcrypto if it is not installed + IF NOT pgcrypto_installed THEN + BEGIN + CREATE EXTENSION pgcrypto; + pgcrypto_installed := TRUE; + RAISE NOTICE 'pgcrypto extension successfully created.'; + EXCEPTION + WHEN insufficient_privilege THEN + RAISE NOTICE 'pgcrypto extension could not be installed due to insufficient privileges; using fallback'; + pgcrypto_installed := FALSE; + WHEN OTHERS THEN + RAISE NOTICE 'An unexpected error occurred while attempting to install pgcrypto; using fallback'; + pgcrypto_installed := FALSE; + END; + END IF; +END $$; + +CREATE OR REPLACE FUNCTION uuid_generate_v7(p_timestamp timestamp with time zone) +RETURNS uuid +LANGUAGE plpgsql +PARALLEL SAFE +AS $$ +DECLARE + unix_time_ms CONSTANT bytea NOT NULL DEFAULT substring(int8send((extract(epoch FROM p_timestamp) * 1000)::bigint) from 3); + buffer bytea; + pgcrypto_installed BOOLEAN := EXISTS (SELECT 1 FROM pg_extension WHERE extname = 'pgcrypto'); +BEGIN + -- Use pgcrypto if available, otherwise use the fallback + -- fallback from https://brandur.org/fragments/secure-bytes-without-pgcrypto + IF pgcrypto_installed THEN + buffer := unix_time_ms || gen_random_bytes(10); + ELSE + buffer := unix_time_ms || substring(uuid_send(gen_random_uuid()) FROM 1 FOR 5) || + substring(uuid_send(gen_random_uuid()) FROM 12 FOR 5); + END IF; + + -- Set UUID version and variant bits + buffer := set_byte(buffer, 6, (b'0111' || get_byte(buffer, 6)::bit(4))::bit(8)::int); + buffer := set_byte(buffer, 8, (b'10' || get_byte(buffer, 8)::bit(6))::bit(8)::int); + RETURN encode(buffer, 'hex')::uuid; +END +$$; +""" + +pg_uuid7_fn_drop = """ +DROP FUNCTION IF EXISTS uuid_generate_v7(timestamp with time zone); +""" + +# MySQL-specific UUID v7 function +mysql_uuid7_fn = """ +DROP FUNCTION IF EXISTS uuid_generate_v7; +CREATE FUNCTION uuid_generate_v7(p_timestamp DATETIME(3)) +RETURNS CHAR(36) +DETERMINISTIC +BEGIN + DECLARE unix_time_ms BIGINT; + DECLARE time_hex CHAR(12); + DECLARE rand_hex CHAR(24); + DECLARE uuid CHAR(36); + + -- Convert the passed timestamp to milliseconds since epoch + SET unix_time_ms = UNIX_TIMESTAMP(p_timestamp) * 1000; + SET time_hex = LPAD(HEX(unix_time_ms), 12, '0'); + SET rand_hex = CONCAT( + LPAD(HEX(FLOOR(RAND() * POW(2,32))), 8, '0'), + LPAD(HEX(FLOOR(RAND() * POW(2,32))), 8, '0') + ); + SET rand_hex =
CONCAT(SUBSTRING(rand_hex, 1, 4), '7', SUBSTRING(rand_hex, 6)); + SET rand_hex = CONCAT(SUBSTRING(rand_hex, 1, 12), '8', SUBSTRING(rand_hex, 14)); + + SET uuid = LOWER(CONCAT( + SUBSTRING(time_hex, 1, 8), '-', + SUBSTRING(time_hex, 9, 4), '-', + SUBSTRING(rand_hex, 1, 4), '-', + SUBSTRING(rand_hex, 5, 4), '-', + SUBSTRING(rand_hex, 9) + )); + + RETURN uuid; +END; +""" + +mysql_uuid7_fn_drop = """ +DROP FUNCTION IF EXISTS uuid_generate_v7; +""" + +ti_table = "task_instance" + +# Foreign key columns from task_instance +ti_fk_cols = ["dag_id", "task_id", "run_id", "map_index"] + +# Foreign key constraints from other tables to task_instance +ti_fk_constraints = [ + {"table": "rendered_task_instance_fields", "fk": "rtif_ti_fkey"}, + {"table": "task_fail", "fk": "task_fail_ti_fkey"}, + {"table": "task_instance_history", "fk": "task_instance_history_ti_fkey"}, + {"table": "task_instance_note", "fk": "task_instance_note_ti_fkey"}, + {"table": "task_map", "fk": "task_map_task_instance_fkey"}, + {"table": "task_reschedule", "fk": "task_reschedule_ti_fkey"}, + {"table": "xcom", "fk": "xcom_task_instance_fkey"}, +] + + +def _get_type_id_column(dialect_name: str) -> sa.types.TypeEngine: + # For PostgreSQL, use the UUID type directly as it is more efficient + if dialect_name == "postgresql": + return postgresql.UUID(as_uuid=False) + # For other databases, use String(36) to match UUID format + else: + return sa.String(36) + + +def upgrade(): + """Add UUID primary key to task instance table.""" + conn = op.get_bind() + dialect_name = conn.dialect.name + + op.add_column("task_instance", sa.Column("id", _get_type_id_column(dialect_name), nullable=True)) + + if dialect_name == "postgresql": + op.execute(pg_uuid7_fn) + + # TODO: Add batching to handle updates in smaller chunks for large tables to avoid locking + # Migrate existing rows with UUID v7 using a timestamp-based generation + op.execute( + "UPDATE task_instance SET id = uuid_generate_v7(coalesce(queued_dttm, start_date, clock_timestamp()))" + ) + + op.execute(pg_uuid7_fn_drop) + + # Drop existing primary key constraint to task_instance table + op.execute("ALTER TABLE IF EXISTS task_instance DROP CONSTRAINT task_instance_pkey CASCADE") + + elif dialect_name == "mysql": + op.execute(mysql_uuid7_fn) + + # Migrate existing rows with UUID v7 + op.execute(""" + UPDATE task_instance + SET id = uuid_generate_v7(coalesce(queued_dttm, start_date, NOW(3))) + WHERE id IS NULL + """) + + # Drop this function as it is no longer needed + op.execute(mysql_uuid7_fn_drop) + for fk in ti_fk_constraints: + op.drop_constraint(fk["fk"], fk["table"], type_="foreignkey") + with op.batch_alter_table("task_instance") as batch_op: + batch_op.drop_constraint("task_instance_pkey", type_="primary") + elif dialect_name == "sqlite": + from uuid6 import uuid7 + + stmt = text("SELECT COUNT(*) FROM task_instance WHERE id IS NULL") + conn = op.get_bind() + task_instances = conn.execute(stmt).scalar() + uuid_values = [str(uuid7()) for _ in range(task_instances)] + + # Ensure `uuid_values` is a list or iterable with the UUIDs for the update. 
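# --- Editorial note (illustration only, not part of this PR) -----------------
# Both SQL functions above construct a UUID v7: 48 bits of Unix-epoch
# milliseconds followed by random bits, with the version nibble forced to 7 and
# the variant bits set to 10. The SQLite branch gets the same layout from
# uuid6.uuid7() in Python. A pure-Python sketch of that layout, for reference:
import os
import time
import uuid


def uuid7_like(ts_ms=None):
    """Build a UUID v7-style value: 48-bit millisecond timestamp + 80 random bits."""
    if ts_ms is None:
        ts_ms = int(time.time() * 1000)
    buf = bytearray(ts_ms.to_bytes(6, "big") + os.urandom(10))
    buf[6] = (buf[6] & 0x0F) | 0x70  # version 7 in the high nibble of byte 6
    buf[8] = (buf[8] & 0x3F) | 0x80  # RFC 4122 variant in the top two bits of byte 8
    return uuid.UUID(bytes=bytes(buf))
# ------------------------------------------------------------------------------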
+ stmt = text(""" + UPDATE task_instance + SET id = :uuid + WHERE id IS NULL + """) + + for uuid_value in uuid_values: + conn.execute(stmt.bindparams(uuid=uuid_value)) + + with op.batch_alter_table("task_instance") as batch_op: + batch_op.drop_constraint("task_instance_pkey", type_="primary") + + # Add primary key and unique constraint to task_instance table + with op.batch_alter_table("task_instance") as batch_op: + batch_op.alter_column("id", type_=_get_type_id_column(dialect_name), nullable=False) + batch_op.create_unique_constraint("task_instance_composite_key", ti_fk_cols) + batch_op.create_primary_key("task_instance_pkey", ["id"]) + + # Create foreign key constraints + for fk in ti_fk_constraints: + with op.batch_alter_table(fk["table"]) as batch_op: + batch_op.create_foreign_key( + constraint_name=fk["fk"], + referent_table=ti_table, + local_cols=ti_fk_cols, + remote_cols=ti_fk_cols, + ondelete="CASCADE", + ) + + +def downgrade(): + """Drop UUID primary key to task instance table.""" + conn = op.get_bind() + dialect_name = conn.dialect.name + + if dialect_name == "postgresql": + op.execute("ALTER TABLE IF EXISTS task_instance DROP CONSTRAINT task_instance_composite_key CASCADE") + op.execute(pg_uuid7_fn_drop) + + elif dialect_name == "mysql": + for fk in ti_fk_constraints: + op.drop_constraint(fk["fk"], fk["table"], type_="foreignkey") + + with op.batch_alter_table("task_instance") as batch_op: + batch_op.drop_constraint("task_instance_composite_key", type_="unique") + op.execute(mysql_uuid7_fn_drop) + + elif dialect_name == "sqlite": + with op.batch_alter_table("task_instance") as batch_op: + batch_op.drop_constraint("task_instance_composite_key", type_="unique") + + with op.batch_alter_table("task_instance") as batch_op: + batch_op.drop_constraint("task_instance_pkey", type_="primary") + batch_op.drop_column("id") + batch_op.create_primary_key("task_instance_pkey", ti_fk_cols) + + # Re-add foreign key constraints + for fk in ti_fk_constraints: + with op.batch_alter_table(fk["table"]) as batch_op: + batch_op.create_foreign_key( + constraint_name=fk["fk"], + referent_table=ti_table, + local_cols=ti_fk_cols, + remote_cols=ti_fk_cols, + ondelete="CASCADE", + ) diff --git a/airflow/utils/compression.py b/airflow/migrations/versions/0043_3_0_0_remove_scheduler_lock_column.py similarity index 51% rename from airflow/utils/compression.py rename to airflow/migrations/versions/0043_3_0_0_remove_scheduler_lock_column.py index 8f4946346d636..12d1b16d9729d 100644 --- a/airflow/utils/compression.py +++ b/airflow/migrations/versions/0043_3_0_0_remove_scheduler_lock_column.py @@ -15,26 +15,35 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +""" +remove scheduler_lock column. + +Revision ID: 486ac7936b78 +Revises: d59cbbef95eb +Create Date: 2024-10-23 07:48:52.494396 + +""" + from __future__ import annotations -import bz2 -import gzip -import shutil -from tempfile import NamedTemporaryFile - - -def uncompress_file(input_file_name, file_extension, dest_dir): - """Uncompress gz and bz2 files.""" - if file_extension.lower() not in (".gz", ".bz2"): - raise NotImplementedError( - f"Received {file_extension} format. Only gz and bz2 files can currently be uncompressed." 
- ) - if file_extension.lower() == ".gz": - fmodule = gzip.GzipFile - elif file_extension.lower() == ".bz2": - fmodule = bz2.BZ2File - with fmodule(input_file_name, mode="rb") as f_compressed, NamedTemporaryFile( - dir=dest_dir, mode="wb", delete=False - ) as f_uncompressed: - shutil.copyfileobj(f_compressed, f_uncompressed) - return f_uncompressed.name +import sqlalchemy as sa +from alembic import op + +revision = "486ac7936b78" +down_revision = "d59cbbef95eb" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + """Apply remove scheduler_lock column.""" + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.drop_column("scheduler_lock") + + +def downgrade(): + """Unapply remove scheduler_lock column.""" + with op.batch_alter_table("dag", schema=None) as batch_op: + batch_op.add_column(sa.Column("scheduler_lock", sa.BOOLEAN(), autoincrement=False, nullable=True)) diff --git a/airflow/migrations/versions/0044_3_0_0__drop_task_fail_table.py b/airflow/migrations/versions/0044_3_0_0__drop_task_fail_table.py new file mode 100644 index 0000000000000..1e499218262dd --- /dev/null +++ b/airflow/migrations/versions/0044_3_0_0__drop_task_fail_table.py @@ -0,0 +1,75 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Drop task_fail table. + +Revision ID: 5f57a45b8433 +Revises: 486ac7936b78 +Create Date: 2024-10-29 17:49:27.740730 +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import TIMESTAMP, StringID + +# revision identifiers, used by Alembic. 
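# --- Editorial note (illustration only, not part of this PR) -----------------
# A quick way to see the effect of the 0043/0044 migrations above is to inspect
# the resulting schema with SQLAlchemy: after upgrading, the "dag" table should
# no longer have a "scheduler_lock" column and "task_fail" should be gone. A
# sketch, assuming a local SQLite metadata database (the URL is an assumption):
import sqlalchemy as sa

engine = sa.create_engine("sqlite:///airflow.db")
inspector = sa.inspect(engine)

dag_columns = {column["name"] for column in inspector.get_columns("dag")}
assert "scheduler_lock" not in dag_columns
assert "task_fail" not in inspector.get_table_names()
# ------------------------------------------------------------------------------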
+revision = "5f57a45b8433" +down_revision = "486ac7936b78" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + """Apply Drop task_fail table.""" + op.drop_table("task_fail") + + +def downgrade(): + """Re-add task_fail table.""" + op.create_table( + "task_fail", + sa.Column("id", sa.Integer(), primary_key=True, nullable=False), + sa.Column("task_id", StringID(length=250), nullable=False), + sa.Column("dag_id", StringID(length=250), nullable=False), + sa.Column("run_id", StringID(length=250), nullable=False), + sa.Column("map_index", sa.Integer(), server_default=sa.text("-1"), nullable=False), + sa.Column("start_date", TIMESTAMP(timezone=True), nullable=True), + sa.Column("end_date", TIMESTAMP(timezone=True), nullable=True), + sa.Column("duration", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["dag_id", "task_id", "run_id", "map_index"], + [ + "task_instance.dag_id", + "task_instance.task_id", + "task_instance.run_id", + "task_instance.map_index", + ], + name="task_fail_ti_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name=op.f("task_fail_pkey")), + ) + with op.batch_alter_table("task_fail", schema=None) as batch_op: + batch_op.create_index( + "idx_task_fail_task_instance", ["dag_id", "task_id", "run_id", "map_index"], unique=False + ) diff --git a/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py b/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py new file mode 100644 index 0000000000000..47e72de9dcb4b --- /dev/null +++ b/airflow/migrations/versions/0045_3_0_0_add_last_heartbeat_at_directly_to_ti.py @@ -0,0 +1,60 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Add last_heartbeat_at directly to TI. + +Revision ID: d8cd3297971e +Revises: 5f57a45b8433 +Create Date: 2024-11-01 12:14:59.927266 + +""" + +from __future__ import annotations + +import sqlalchemy as sa +from alembic import op + +from airflow.migrations.db_types import TIMESTAMP + +# revision identifiers, used by Alembic. 
+revision = "d8cd3297971e" +down_revision = "5f57a45b8433" +branch_labels = None +depends_on = None +airflow_version = "3.0.0" + + +def upgrade(): + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.add_column(sa.Column("last_heartbeat_at", TIMESTAMP(timezone=True), nullable=True)) + batch_op.drop_index("ti_job_id") + batch_op.create_index("ti_heartbeat", ["last_heartbeat_at"], unique=False) + batch_op.drop_column("job_id") + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.drop_column("job_id") + + +def downgrade(): + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.add_column(sa.Column("job_id", sa.INTEGER(), autoincrement=False, nullable=True)) + batch_op.drop_index("ti_heartbeat") + batch_op.create_index("ti_job_id", ["job_id"], unique=False) + batch_op.drop_column("last_heartbeat_at") + with op.batch_alter_table("task_instance_history", schema=None) as batch_op: + batch_op.add_column(sa.Column("job_id", sa.INTEGER(), autoincrement=False, nullable=True)) diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 1a998d60c5c92..7e71dddc65dfe 100644 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -41,7 +41,6 @@ "Pool", "RenderedTaskInstanceFields", "SkipMixin", - "TaskFail", "TaskInstance", "TaskReschedule", "Trigger", @@ -103,7 +102,6 @@ def __getattr__(name): "Pool": "airflow.models.pool", "RenderedTaskInstanceFields": "airflow.models.renderedtifields", "SkipMixin": "airflow.models.skipmixin", - "TaskFail": "airflow.models.taskfail", "TaskInstance": "airflow.models.taskinstance", "TaskReschedule": "airflow.models.taskreschedule", "Trigger": "airflow.models.trigger", @@ -132,7 +130,6 @@ def __getattr__(name): from airflow.models.pool import Pool from airflow.models.renderedtifields import RenderedTaskInstanceFields from airflow.models.skipmixin import SkipMixin - from airflow.models.taskfail import TaskFail from airflow.models.taskinstance import TaskInstance, clear_task_instances from airflow.models.taskinstancehistory import TaskInstanceHistory from airflow.models.taskreschedule import TaskReschedule diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py index 45eb3c5fff189..feafb0b6b637d 100644 --- a/airflow/models/abstractoperator.py +++ b/airflow/models/abstractoperator.py @@ -19,9 +19,8 @@ import datetime import inspect -from abc import abstractproperty from functools import cached_property -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Collection, Iterable, Iterator, Sequence +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Sequence import methodtools from sqlalchemy import select @@ -29,7 +28,7 @@ from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.models.expandinput import NotFullyPopulated -from airflow.models.taskmixin import DAGNode, DependencyMixin +from airflow.sdk.definitions.abstractoperator import AbstractOperator as TaskSDKAbstractOperator from airflow.template.templater import Templater from airflow.utils.context import Context from airflow.utils.db import exists_query @@ -39,25 +38,26 @@ from airflow.utils.state import State, TaskInstanceState from airflow.utils.task_group import MappedTaskGroup from airflow.utils.trigger_rule import TriggerRule -from airflow.utils.types import NOTSET, ArgNotSet from airflow.utils.weight_rule import WeightRule -TaskStateChangeCallback = Callable[[Context], None] - if TYPE_CHECKING: + from 
collections.abc import Mapping + import jinja2 # Slow import. from sqlalchemy.orm import Session - from airflow.models.baseoperator import BaseOperator from airflow.models.baseoperatorlink import BaseOperatorLink - from airflow.models.dag import DAG + from airflow.models.dag import DAG as SchedulerDAG from airflow.models.mappedoperator import MappedOperator - from airflow.models.operator import Operator from airflow.models.taskinstance import TaskInstance + from airflow.sdk import DAG, BaseOperator + from airflow.sdk.definitions.node import DAGNode from airflow.task.priority_strategy import PriorityWeightStrategy from airflow.triggers.base import StartTriggerArgs from airflow.utils.task_group import TaskGroup +TaskStateChangeCallback = Callable[[Context], None] + DEFAULT_OWNER: str = conf.get_mandatory_value("operators", "default_owner") DEFAULT_POOL_SLOTS: int = 1 DEFAULT_PRIORITY_WEIGHT: int = 1 @@ -86,7 +86,7 @@ class NotMapped(Exception): """Raise if a task is neither mapped nor has any parent mapped groups.""" -class AbstractOperator(Templater, DAGNode): +class AbstractOperator(Templater, TaskSDKAbstractOperator): """ Common implementation for operators, including unmapped and mapped. @@ -100,101 +100,8 @@ class AbstractOperator(Templater, DAGNode): :meta private: """ - operator_class: type[BaseOperator] | dict[str, Any] - - weight_rule: PriorityWeightStrategy - priority_weight: int - - # Defines the operator level extra links. - operator_extra_links: Collection[BaseOperatorLink] - - owner: str - task_id: str - - outlets: list - inlets: list trigger_rule: TriggerRule - _needs_expansion: bool | None = None - _on_failure_fail_dagrun = False - - HIDE_ATTRS_FROM_UI: ClassVar[frozenset[str]] = frozenset( - ( - "log", - "dag", # We show dag_id, don't need to show this too - "node_id", # Duplicates task_id - "task_group", # Doesn't have a useful repr, no point showing in UI - "inherits_from_empty_operator", # impl detail - # Decide whether to start task execution from triggerer - "start_trigger_args", - "start_from_trigger", - # For compatibility with TG, for operators these are just the current task, no point showing - "roots", - "leaves", - # These lists are already shown via *_task_ids - "upstream_list", - "downstream_list", - # Not useful, implementation detail, already shown elsewhere - "global_operator_extra_link_dict", - "operator_extra_link_dict", - ) - ) - - def get_dag(self) -> DAG | None: - raise NotImplementedError() - - @property - def task_type(self) -> str: - raise NotImplementedError() - - @property - def operator_name(self) -> str: - raise NotImplementedError() - - @property - def inherits_from_empty_operator(self) -> bool: - raise NotImplementedError() - - @property - def dag_id(self) -> str: - """Returns dag id if it has one or an adhoc + owner.""" - dag = self.get_dag() - if dag: - return dag.dag_id - return f"adhoc_{self.owner}" - - @property - def node_id(self) -> str: - return self.task_id - - @abstractproperty - def task_display_name(self) -> str: ... 
- - @property - def label(self) -> str | None: - if self.task_display_name and self.task_display_name != self.task_id: - return self.task_display_name - # Prefix handling if no display is given is cloned from taskmixin for compatibility - tg = self.task_group - if tg and tg.node_id and tg.prefix_group_id: - # "task_group_id.task_id" -> "task_id" - return self.task_id[len(tg.node_id) + 1 :] - return self.task_id - - @property - def is_setup(self) -> bool: - raise NotImplementedError() - - @is_setup.setter - def is_setup(self, value: bool) -> None: - raise NotImplementedError() - - @property - def is_teardown(self) -> bool: - raise NotImplementedError() - - @is_teardown.setter - def is_teardown(self, value: bool) -> None: - raise NotImplementedError() + weight_rule: PriorityWeightStrategy @property def on_failure_fail_dagrun(self): @@ -219,113 +126,71 @@ def on_failure_fail_dagrun(self, value): ) self._on_failure_fail_dagrun = value - def as_setup(self): - self.is_setup = True - return self + def get_template_env(self, dag: DAG | None = None) -> jinja2.Environment: + """Get the template environment for rendering templates.""" + if dag is None: + dag = self.get_dag() + return super().get_template_env(dag=dag) - def as_teardown( + def _render(self, template, context, dag: DAG | None = None): + if dag is None: + dag = self.get_dag() + return super()._render(template, context, dag=dag) + + def _do_render_template_fields( self, - *, - setups: BaseOperator | Iterable[BaseOperator] | ArgNotSet = NOTSET, - on_failure_fail_dagrun=NOTSET, - ): - self.is_teardown = True - self.trigger_rule = TriggerRule.ALL_DONE_SETUP_SUCCESS - if on_failure_fail_dagrun is not NOTSET: - self.on_failure_fail_dagrun = on_failure_fail_dagrun - if not isinstance(setups, ArgNotSet): - setups = [setups] if isinstance(setups, DependencyMixin) else setups - for s in setups: - s.is_setup = True - s >> self - return self - - def get_direct_relative_ids(self, upstream: bool = False) -> set[str]: - """Get direct relative IDs to the current task, upstream or downstream.""" - if upstream: - return self.upstream_task_ids - return self.downstream_task_ids - - def get_flat_relative_ids(self, *, upstream: bool = False) -> set[str]: - """ - Get a flat set of relative IDs, upstream or downstream. - - Will recurse each relative found in the direction specified. - - :param upstream: Whether to look for upstream or downstream relatives. - """ - dag = self.get_dag() - if not dag: - return set() - - relatives: set[str] = set() - - # This is intentionally implemented as a loop, instead of calling - # get_direct_relative_ids() recursively, since Python has significant - # limitation on stack level, and a recursive implementation can blow up - # if a DAG contains very long routes. - task_ids_to_trace = self.get_direct_relative_ids(upstream) - while task_ids_to_trace: - task_ids_to_trace_next: set[str] = set() - for task_id in task_ids_to_trace: - if task_id in relatives: + parent: Any, + template_fields: Iterable[str], + context: Mapping[str, Any], + jinja_env: jinja2.Environment, + seen_oids: set[int], + ) -> None: + """Override the base to use custom error logging.""" + for attr_name in template_fields: + try: + value = getattr(parent, attr_name) + except AttributeError: + raise AttributeError( + f"{attr_name!r} is configured as a template field " + f"but {parent.task_type} does not have this attribute." 
+ ) + try: + if not value: continue - task_ids_to_trace_next.update(dag.task_dict[task_id].get_direct_relative_ids(upstream)) - relatives.add(task_id) - task_ids_to_trace = task_ids_to_trace_next - - return relatives - - def get_flat_relatives(self, upstream: bool = False) -> Collection[Operator]: - """Get a flat list of relatives, either upstream or downstream.""" - dag = self.get_dag() - if not dag: - return set() - return [dag.task_dict[task_id] for task_id in self.get_flat_relative_ids(upstream=upstream)] - - def get_upstreams_follow_setups(self) -> Iterable[Operator]: - """All upstreams and, for each upstream setup, its respective teardowns.""" - for task in self.get_flat_relatives(upstream=True): - yield task - if task.is_setup: - for t in task.downstream_list: - if t.is_teardown and t != self: - yield t - - def get_upstreams_only_setups_and_teardowns(self) -> Iterable[Operator]: - """ - Only *relevant* upstream setups and their teardowns. - - This method is meant to be used when we are clearing the task (non-upstream) and we need - to add in the *relevant* setups and their teardowns. - - Relevant in this case means, the setup has a teardown that is downstream of ``self``, - or the setup has no teardowns. - """ - downstream_teardown_ids = { - x.task_id for x in self.get_flat_relatives(upstream=False) if x.is_teardown - } - for task in self.get_flat_relatives(upstream=True): - if not task.is_setup: - continue - has_no_teardowns = not any(True for x in task.downstream_list if x.is_teardown) - # if task has no teardowns or has teardowns downstream of self - if has_no_teardowns or task.downstream_task_ids.intersection(downstream_teardown_ids): - yield task - for t in task.downstream_list: - if t.is_teardown and t != self: - yield t - - def get_upstreams_only_setups(self) -> Iterable[Operator]: - """ - Return relevant upstream setups. + except Exception: + # This may happen if the templated field points to a class which does not support `__bool__`, + # such as Pandas DataFrames: + # https://github.com/pandas-dev/pandas/blob/9135c3aaf12d26f857fcc787a5b64d521c51e379/pandas/core/generic.py#L1465 + self.log.info( + "Unable to check if the value of type '%s' is False for task '%s', field '%s'.", + type(value).__name__, + self.task_id, + attr_name, + ) + # We may still want to render custom classes which do not support __bool__ + pass - This method is meant to be used when we are checking task dependencies where we need - to wait for all the upstream setups to complete before we can run the task. - """ - for task in self.get_upstreams_only_setups_and_teardowns(): - if task.is_setup: - yield task + try: + if callable(value): + rendered_content = value(context=context, jinja_env=jinja_env) + else: + rendered_content = self.render_template( + value, + context, + jinja_env, + seen_oids, + ) + except Exception: + value_masked = redact(name=attr_name, value=value) + self.log.exception( + "Exception rendering Jinja template for task '%s', field '%s'. 
Template: %r", + self.task_id, + attr_name, + value_masked, + ) + raise + else: + setattr(parent, attr_name, rendered_content) def _iter_all_mapped_downstreams(self) -> Iterator[MappedOperator | MappedTaskGroup]: """ @@ -394,7 +259,9 @@ def iter_mapped_task_groups(self) -> Iterator[MappedTaskGroup]: """ if (group := self.task_group) is None: return - yield from group.iter_mapped_task_groups() + # TODO: Task-SDK: this type ignore shouldn't be necessary, revisit once mapping support is fully in the + # SDK + yield from group.iter_mapped_task_groups() # type: ignore[misc] def get_closest_mapped_task_group(self) -> MappedTaskGroup | None: """ @@ -460,6 +327,7 @@ def priority_weight_total(self) -> int: - WeightRule.DOWNSTREAM - adds priority weight of all downstream tasks - WeightRule.UPSTREAM - adds priority weight of all upstream tasks """ + # TODO: This should live in the WeightStragies themselves, not in here from airflow.task.priority_strategy import ( _AbsolutePriorityWeightStrategy, _DownstreamPriorityWeightStrategy, @@ -587,9 +455,9 @@ def expand_mapped_task(self, run_id: str, *, session: Session) -> tuple[Sequence """ from sqlalchemy import func, or_ - from airflow.models.baseoperator import BaseOperator from airflow.models.mappedoperator import MappedOperator from airflow.models.taskinstance import TaskInstance + from airflow.sdk import BaseOperator from airflow.settings import task_instance_mutation_hook if not isinstance(self, (BaseOperator, MappedOperator)): @@ -624,6 +492,9 @@ def expand_mapped_task(self, run_id: str, *, session: Session) -> tuple[Sequence all_expanded_tis: list[TaskInstance] = [] if unmapped_ti: + if TYPE_CHECKING: + assert self.dag is None or isinstance(self.dag, SchedulerDAG) + # The unmapped task instance still exists and is unfinished, i.e. we # haven't tried to run it before. if total_length is None: @@ -721,72 +592,6 @@ def render_template_fields( """ raise NotImplementedError() - def _render(self, template, context, dag: DAG | None = None): - if dag is None: - dag = self.get_dag() - return super()._render(template, context, dag=dag) - - def get_template_env(self, dag: DAG | None = None) -> jinja2.Environment: - """Get the template environment for rendering templates.""" - if dag is None: - dag = self.get_dag() - return super().get_template_env(dag=dag) - - def _do_render_template_fields( - self, - parent: Any, - template_fields: Iterable[str], - context: Context, - jinja_env: jinja2.Environment, - seen_oids: set[int], - ) -> None: - """Override the base to use custom error logging.""" - for attr_name in template_fields: - try: - value = getattr(parent, attr_name) - except AttributeError: - raise AttributeError( - f"{attr_name!r} is configured as a template field " - f"but {parent.task_type} does not have this attribute." 
- ) - try: - if not value: - continue - except Exception: - # This may happen if the templated field points to a class which does not support `__bool__`, - # such as Pandas DataFrames: - # https://github.com/pandas-dev/pandas/blob/9135c3aaf12d26f857fcc787a5b64d521c51e379/pandas/core/generic.py#L1465 - self.log.info( - "Unable to check if the value of type '%s' is False for task '%s', field '%s'.", - type(value).__name__, - self.task_id, - attr_name, - ) - # We may still want to render custom classes which do not support __bool__ - pass - - try: - if callable(value): - rendered_content = value(context=context, jinja_env=jinja_env) - else: - rendered_content = self.render_template( - value, - context, - jinja_env, - seen_oids, - ) - except Exception: - value_masked = redact(name=attr_name, value=value) - self.log.exception( - "Exception rendering Jinja template for task '%s', field '%s'. Template: %r", - self.task_id, - attr_name, - value_masked, - ) - raise - else: - setattr(parent, attr_name, rendered_content) - def __enter__(self): if not self.is_setup and not self.is_teardown: raise AirflowException("Only setup/teardown tasks can be used as context managers.") diff --git a/airflow/models/asset.py b/airflow/models/asset.py index 79f9b7389439d..fdfb55143cb35 100644 --- a/airflow/models/asset.py +++ b/airflow/models/asset.py @@ -40,45 +40,21 @@ from airflow.utils.sqlalchemy import UtcDateTime alias_association_table = Table( - "dataset_alias_dataset", + "asset_alias_asset", Base.metadata, - Column("alias_id", ForeignKey("dataset_alias.id", ondelete="CASCADE"), primary_key=True), - Column("dataset_id", ForeignKey("dataset.id", ondelete="CASCADE"), primary_key=True), - Index("idx_dataset_alias_dataset_alias_id", "alias_id"), - Index("idx_dataset_alias_dataset_alias_dataset_id", "dataset_id"), - ForeignKeyConstraint( - ("alias_id",), - ["dataset_alias.id"], - name="ds_dsa_alias_id", - ondelete="CASCADE", - ), - ForeignKeyConstraint( - ("dataset_id",), - ["dataset.id"], - name="ds_dsa_dataset_id", - ondelete="CASCADE", - ), + Column("alias_id", ForeignKey("asset_alias.id", ondelete="CASCADE"), primary_key=True), + Column("asset_id", ForeignKey("asset.id", ondelete="CASCADE"), primary_key=True), + Index("idx_asset_alias_asset_alias_id", "alias_id"), + Index("idx_asset_alias_asset_asset_id", "asset_id"), ) -dataset_alias_dataset_event_assocation_table = Table( - "dataset_alias_dataset_event", +asset_alias_asset_event_assocation_table = Table( + "asset_alias_asset_event", Base.metadata, - Column("alias_id", ForeignKey("dataset_alias.id", ondelete="CASCADE"), primary_key=True), - Column("event_id", ForeignKey("dataset_event.id", ondelete="CASCADE"), primary_key=True), - Index("idx_dataset_alias_dataset_event_alias_id", "alias_id"), - Index("idx_dataset_alias_dataset_event_event_id", "event_id"), - ForeignKeyConstraint( - ("alias_id",), - ["dataset_alias.id"], - name="dss_de_alias_id", - ondelete="CASCADE", - ), - ForeignKeyConstraint( - ("event_id",), - ["dataset_event.id"], - name="dss_de_event_id", - ondelete="CASCADE", - ), + Column("alias_id", ForeignKey("asset_alias.id", ondelete="CASCADE"), primary_key=True), + Column("event_id", ForeignKey("asset_event.id", ondelete="CASCADE"), primary_key=True), + Index("idx_asset_alias_asset_event_alias_id", "alias_id"), + Index("idx_asset_alias_asset_event_event_id", "event_id"), ) @@ -116,23 +92,23 @@ class AssetAliasModel(Base): nullable=False, ) - __tablename__ = "dataset_alias" + __tablename__ = "asset_alias" __table_args__ = ( - 
Index("idx_dataset_alias_name_unique", name, unique=True), + Index("idx_asset_alias_name_unique", name, unique=True), {"sqlite_autoincrement": True}, # ensures PK values not reused ) - datasets = relationship( + assets = relationship( "AssetModel", secondary=alias_association_table, backref="aliases", ) - dataset_events = relationship( + asset_events = relationship( "AssetEvent", - secondary=dataset_alias_dataset_event_assocation_table, + secondary=asset_alias_asset_event_assocation_table, back_populates="source_aliases", ) - consuming_dags = relationship("DagScheduleAssetAliasReference", back_populates="dataset_alias") + consuming_dags = relationship("DagScheduleAssetAliasReference", back_populates="asset_alias") @classmethod def from_public(cls, obj: AssetAlias) -> AssetAliasModel: @@ -205,14 +181,14 @@ class AssetModel(Base): created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - active = relationship("AssetActive", uselist=False, viewonly=True) + active = relationship("AssetActive", uselist=False, viewonly=True, back_populates="asset") - consuming_dags = relationship("DagScheduleAssetReference", back_populates="dataset") - producing_tasks = relationship("TaskOutletAssetReference", back_populates="dataset") + consuming_dags = relationship("DagScheduleAssetReference", back_populates="asset") + producing_tasks = relationship("TaskOutletAssetReference", back_populates="asset") - __tablename__ = "dataset" + __tablename__ = "asset" __table_args__ = ( - Index("idx_dataset_name_uri_unique", name, uri, unique=True), + Index("idx_asset_name_uri_unique", name, uri, unique=True), {"sqlite_autoincrement": True}, # ensures PK values not reused ) @@ -245,7 +221,7 @@ def __hash__(self): return hash((self.name, self.uri)) def __repr__(self): - return f"{self.__class__.__name__}(uri={self.uri!r}, extra={self.extra!r})" + return f"{self.__class__.__name__}(name={self.name!r}, uri={self.uri!r}, extra={self.extra!r})" def to_public(self) -> Asset: return Asset(name=self.name, uri=self.uri, group=self.group, extra=self.extra) @@ -288,12 +264,14 @@ class AssetActive(Base): nullable=False, ) + asset = relationship("AssetModel", back_populates="active") + __tablename__ = "asset_active" __table_args__ = ( PrimaryKeyConstraint(name, uri, name="asset_active_pkey"), ForeignKeyConstraint( columns=[name, uri], - refcolumns=["dataset.name", "dataset.uri"], + refcolumns=["asset.name", "asset.uri"], name="asset_active_asset_name_uri_fkey", ondelete="CASCADE", ), @@ -314,25 +292,25 @@ class DagScheduleAssetAliasReference(Base): created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - dataset_alias = relationship("AssetAliasModel", back_populates="consuming_dags") - dag = relationship("DagModel", back_populates="schedule_dataset_alias_references") + asset_alias = relationship("AssetAliasModel", back_populates="consuming_dags") + dag = relationship("DagModel", back_populates="schedule_asset_alias_references") - __tablename__ = "dag_schedule_dataset_alias_reference" + __tablename__ = "dag_schedule_asset_alias_reference" __table_args__ = ( - PrimaryKeyConstraint(alias_id, dag_id, name="dsdar_pkey"), + PrimaryKeyConstraint(alias_id, dag_id, name="dsaar_pkey"), ForeignKeyConstraint( (alias_id,), - ["dataset_alias.id"], - name="dsdar_dataset_alias_fkey", + ["asset_alias.id"], + 
name="dsaar_asset_alias_fkey", ondelete="CASCADE", ), ForeignKeyConstraint( columns=(dag_id,), refcolumns=["dag.dag_id"], - name="dsdar_dag_fkey", + name="dsaar_dag_id_fkey", ondelete="CASCADE", ), - Index("idx_dag_schedule_dataset_alias_reference_dag_id", dag_id), + Index("idx_dag_schedule_asset_alias_reference_dag_id", dag_id), ) def __eq__(self, other): @@ -344,104 +322,99 @@ def __hash__(self): return hash(self.__mapper__.primary_key) def __repr__(self): - args = [] - for attr in [x.name for x in self.__mapper__.primary_key]: - args.append(f"{attr}={getattr(self, attr)!r}") + args = [f"{x.name}={getattr(self, x.name)!r}" for x in self.__mapper__.primary_key] return f"{self.__class__.__name__}({', '.join(args)})" class DagScheduleAssetReference(Base): """References from a DAG to an asset of which it is a consumer.""" - dataset_id = Column(Integer, primary_key=True, nullable=False) + asset_id = Column(Integer, primary_key=True, nullable=False) dag_id = Column(StringID(), primary_key=True, nullable=False) created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - dataset = relationship("AssetModel", back_populates="consuming_dags") - dag = relationship("DagModel", back_populates="schedule_dataset_references") + asset = relationship("AssetModel", back_populates="consuming_dags") + dag = relationship("DagModel", back_populates="schedule_asset_references") queue_records = relationship( "AssetDagRunQueue", primaryjoin="""and_( - DagScheduleAssetReference.dataset_id == foreign(AssetDagRunQueue.dataset_id), + DagScheduleAssetReference.asset_id == foreign(AssetDagRunQueue.asset_id), DagScheduleAssetReference.dag_id == foreign(AssetDagRunQueue.target_dag_id), )""", cascade="all, delete, delete-orphan", ) - __tablename__ = "dag_schedule_dataset_reference" + __tablename__ = "dag_schedule_asset_reference" __table_args__ = ( - PrimaryKeyConstraint(dataset_id, dag_id, name="dsdr_pkey"), + PrimaryKeyConstraint(asset_id, dag_id, name="dsar_pkey"), ForeignKeyConstraint( - (dataset_id,), - ["dataset.id"], - name="dsdr_dataset_fkey", + (asset_id,), + ["asset.id"], + name="dsar_asset_fkey", ondelete="CASCADE", ), ForeignKeyConstraint( columns=(dag_id,), refcolumns=["dag.dag_id"], - name="dsdr_dag_id_fkey", + name="dsar_dag_id_fkey", ondelete="CASCADE", ), - Index("idx_dag_schedule_dataset_reference_dag_id", dag_id), + Index("idx_dag_schedule_asset_reference_dag_id", dag_id), ) def __eq__(self, other): if isinstance(other, self.__class__): - return self.dataset_id == other.dataset_id and self.dag_id == other.dag_id - else: - return NotImplemented + return self.asset_id == other.asset_id and self.dag_id == other.dag_id + return NotImplemented def __hash__(self): return hash(self.__mapper__.primary_key) def __repr__(self): - args = [] - for attr in [x.name for x in self.__mapper__.primary_key]: - args.append(f"{attr}={getattr(self, attr)!r}") + args = [f"{attr}={getattr(self, attr)!r}" for attr in [x.name for x in self.__mapper__.primary_key]] return f"{self.__class__.__name__}({', '.join(args)})" class TaskOutletAssetReference(Base): """References from a task to an asset that it updates / produces.""" - dataset_id = Column(Integer, primary_key=True, nullable=False) + asset_id = Column(Integer, primary_key=True, nullable=False) dag_id = Column(StringID(), primary_key=True, nullable=False) task_id = Column(StringID(), primary_key=True, nullable=False) created_at = Column(UtcDateTime, 
default=timezone.utcnow, nullable=False) updated_at = Column(UtcDateTime, default=timezone.utcnow, onupdate=timezone.utcnow, nullable=False) - dataset = relationship("AssetModel", back_populates="producing_tasks") + asset = relationship("AssetModel", back_populates="producing_tasks") - __tablename__ = "task_outlet_dataset_reference" + __tablename__ = "task_outlet_asset_reference" __table_args__ = ( ForeignKeyConstraint( - (dataset_id,), - ["dataset.id"], - name="todr_dataset_fkey", + (asset_id,), + ["asset.id"], + name="toar_asset_fkey", ondelete="CASCADE", ), - PrimaryKeyConstraint(dataset_id, dag_id, task_id, name="todr_pkey"), + PrimaryKeyConstraint(asset_id, dag_id, task_id, name="toar_pkey"), ForeignKeyConstraint( columns=(dag_id,), refcolumns=["dag.dag_id"], - name="todr_dag_id_fkey", + name="toar_dag_id_fkey", ondelete="CASCADE", ), - Index("idx_task_outlet_dataset_reference_dag_id", dag_id), + Index("idx_task_outlet_asset_reference_dag_id", dag_id), ) def __eq__(self, other): if isinstance(other, self.__class__): return ( - self.dataset_id == other.dataset_id + self.asset_id == other.asset_id and self.dag_id == other.dag_id and self.task_id == other.task_id ) - else: - return NotImplemented + + return NotImplemented def __hash__(self): return hash(self.__mapper__.primary_key) @@ -456,31 +429,32 @@ def __repr__(self): class AssetDagRunQueue(Base): """Model for storing asset events that need processing.""" - dataset_id = Column(Integer, primary_key=True, nullable=False) + asset_id = Column(Integer, primary_key=True, nullable=False) target_dag_id = Column(StringID(), primary_key=True, nullable=False) created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) - dataset = relationship("AssetModel", viewonly=True) - __tablename__ = "dataset_dag_run_queue" + asset = relationship("AssetModel", viewonly=True) + + __tablename__ = "asset_dag_run_queue" __table_args__ = ( - PrimaryKeyConstraint(dataset_id, target_dag_id, name="datasetdagrunqueue_pkey"), + PrimaryKeyConstraint(asset_id, target_dag_id, name="assetdagrunqueue_pkey"), ForeignKeyConstraint( - (dataset_id,), - ["dataset.id"], - name="ddrq_dataset_fkey", + (asset_id,), + ["asset.id"], + name="adrq_asset_fkey", ondelete="CASCADE", ), ForeignKeyConstraint( (target_dag_id,), ["dag.dag_id"], - name="ddrq_dag_fkey", + name="adrq_dag_fkey", ondelete="CASCADE", ), - Index("idx_dataset_dag_run_queue_target_dag_id", target_dag_id), + Index("idx_asset_dag_run_queue_target_dag_id", target_dag_id), ) def __eq__(self, other): if isinstance(other, self.__class__): - return self.dataset_id == other.dataset_id and self.target_dag_id == other.target_dag_id + return self.asset_id == other.asset_id and self.target_dag_id == other.target_dag_id else: return NotImplemented @@ -495,12 +469,12 @@ def __repr__(self): association_table = Table( - "dagrun_dataset_event", + "dagrun_asset_event", Base.metadata, Column("dag_run_id", ForeignKey("dag_run.id", ondelete="CASCADE"), primary_key=True), - Column("event_id", ForeignKey("dataset_event.id", ondelete="CASCADE"), primary_key=True), - Index("idx_dagrun_dataset_events_dag_run_id", "dag_run_id"), - Index("idx_dagrun_dataset_events_event_id", "event_id"), + Column("event_id", ForeignKey("asset_event.id", ondelete="CASCADE"), primary_key=True), + Index("idx_dagrun_asset_events_dag_run_id", "dag_run_id"), + Index("idx_dagrun_asset_events_event_id", "event_id"), ) @@ -508,7 +482,7 @@ class AssetEvent(Base): """ A table to store assets events. 
- :param dataset_id: reference to AssetModel record + :param asset_id: reference to AssetModel record :param extra: JSON field for arbitrary extra info :param source_task_id: the task_id of the TI which updated the asset :param source_dag_id: the dag_id of the TI which updated the asset @@ -521,7 +495,7 @@ class AssetEvent(Base): """ id = Column(Integer, primary_key=True, autoincrement=True) - dataset_id = Column(Integer, nullable=False) + asset_id = Column(Integer, nullable=False) extra = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=False, default={}) source_task_id = Column(StringID(), nullable=True) source_dag_id = Column(StringID(), nullable=True) @@ -529,22 +503,22 @@ class AssetEvent(Base): source_map_index = Column(Integer, nullable=True, server_default=text("-1")) timestamp = Column(UtcDateTime, default=timezone.utcnow, nullable=False) - __tablename__ = "dataset_event" + __tablename__ = "asset_event" __table_args__ = ( - Index("idx_dataset_id_timestamp", dataset_id, timestamp), + Index("idx_asset_id_timestamp", asset_id, timestamp), {"sqlite_autoincrement": True}, # ensures PK values not reused ) created_dagruns = relationship( "DagRun", secondary=association_table, - backref="consumed_dataset_events", + backref="consumed_asset_events", ) source_aliases = relationship( "AssetAliasModel", - secondary=dataset_alias_dataset_event_assocation_table, - back_populates="dataset_events", + secondary=asset_alias_asset_event_assocation_table, + back_populates="asset_events", ) source_task_instance = relationship( @@ -569,9 +543,9 @@ class AssetEvent(Base): lazy="select", uselist=False, ) - dataset = relationship( + asset = relationship( AssetModel, - primaryjoin="AssetEvent.dataset_id == foreign(AssetModel.id)", + primaryjoin="AssetEvent.asset_id == foreign(AssetModel.id)", viewonly=True, lazy="select", uselist=False, @@ -579,13 +553,13 @@ class AssetEvent(Base): @property def uri(self): - return self.dataset.uri + return self.asset.uri def __repr__(self) -> str: args = [] for attr in [ "id", - "dataset_id", + "asset_id", "extra", "source_task_id", "source_dag_id", diff --git a/airflow/models/backfill.py b/airflow/models/backfill.py index aa9cb695b7579..648b35c5bdebe 100644 --- a/airflow/models/backfill.py +++ b/airflow/models/backfill.py @@ -24,19 +24,29 @@ from __future__ import annotations import logging +from enum import Enum from typing import TYPE_CHECKING -from sqlalchemy import Boolean, Column, ForeignKeyConstraint, Integer, UniqueConstraint, func, select, update +from sqlalchemy import ( + Boolean, + Column, + ForeignKeyConstraint, + Integer, + UniqueConstraint, + desc, + func, + select, +) from sqlalchemy.orm import relationship, validates from sqlalchemy_jsonfield import JSONField -from airflow.api_connexion.exceptions import Conflict, NotFound +from airflow.api_connexion.exceptions import NotFound from airflow.exceptions import AirflowException from airflow.models.base import Base, StringID from airflow.settings import json from airflow.utils import timezone from airflow.utils.session import create_session -from airflow.utils.sqlalchemy import UtcDateTime +from airflow.utils.sqlalchemy import UtcDateTime, nulls_first, with_row_locks from airflow.utils.state import DagRunState from airflow.utils.types import DagRunTriggeredByType, DagRunType @@ -55,6 +65,18 @@ class AlreadyRunningBackfill(AirflowException): """ +class ReprocessBehavior(str, Enum): + """ + Internal enum for setting reprocess behavior in a backfill. 
+ + :meta private: + """ + + FAILED = "failed" + COMPLETED = "completed" + NONE = "none" + + class Backfill(Base): """Model representing a backfill job.""" @@ -64,13 +86,14 @@ class Backfill(Base): dag_id = Column(StringID(), nullable=False) from_date = Column(UtcDateTime, nullable=False) to_date = Column(UtcDateTime, nullable=False) - dag_run_conf = Column(JSONField(json=json), nullable=True) + dag_run_conf = Column(JSONField(json=json), nullable=False, default={}) is_paused = Column(Boolean, default=False) """ Controls whether new dag runs will be created for this backfill. Does not pause existing dag runs. """ + reprocess_behavior = Column(StringID(), nullable=False, default=ReprocessBehavior.NONE) max_active_runs = Column(Integer, default=10, nullable=False) created_at = Column(UtcDateTime, default=timezone.utcnow, nullable=False) completed_at = Column(UtcDateTime, nullable=True) @@ -82,15 +105,27 @@ def __repr__(self): return f"Backfill({self.dag_id=}, {self.from_date=}, {self.to_date=})" +class BackfillDagRunExceptionReason(str, Enum): + """ + Enum for storing reasons why a dag run was not created. + + :meta private: + """ + + IN_FLIGHT = "in flight" + ALREADY_EXISTS = "already exists" + UNKNOWN = "unknown" + + class BackfillDagRun(Base): """Mapping table between backfill run and dag run.""" __tablename__ = "backfill_dag_run" id = Column(Integer, primary_key=True, autoincrement=True) backfill_id = Column(Integer, nullable=False) - dag_run_id = Column( - Integer, nullable=True - ) # the run might already exist; we could store the reason we did not create + dag_run_id = Column(Integer, nullable=True) + exception_reason = Column(StringID()) + logical_date = Column(UtcDateTime, nullable=False) sort_ordinal = Column(Integer, nullable=False) backfill = relationship("Backfill", back_populates="backfill_dag_run_associations") @@ -119,6 +154,91 @@ def validate_sort_ordinal(self, key, val): return val +def _create_backfill_dag_run( + *, + dag, + info, + reprocess_behavior: ReprocessBehavior, + backfill_id, + dag_run_conf, + backfill_sort_ordinal, + session, +): + from airflow.models import DagRun + + with session.begin_nested() as nested: + dr = session.scalar( + with_row_locks( + select(DagRun) + .where(DagRun.execution_date == info.logical_date) + .order_by(nulls_first(desc(DagRun.start_date), session=session)) + .limit(1), + session=session, + ) + ) + if dr: + non_create_reason = None + if dr.state not in (DagRunState.SUCCESS, DagRunState.FAILED): + non_create_reason = BackfillDagRunExceptionReason.IN_FLIGHT + elif reprocess_behavior is ReprocessBehavior.NONE: + non_create_reason = BackfillDagRunExceptionReason.ALREADY_EXISTS + elif reprocess_behavior is ReprocessBehavior.FAILED: + if dr.state != DagRunState.FAILED: + non_create_reason = BackfillDagRunExceptionReason.ALREADY_EXISTS + if non_create_reason: + # rolling back here restores to the start of this nested transaction, + # which releases the lock on the latest dag run, since we + # are not creating a new one + nested.rollback() + session.add( + BackfillDagRun( + backfill_id=backfill_id, + dag_run_id=None, + logical_date=info.logical_date, + exception_reason=non_create_reason, + sort_ordinal=backfill_sort_ordinal, + ) + ) + return + + dr = dag.create_dagrun( + triggered_by=DagRunTriggeredByType.BACKFILL, + execution_date=info.logical_date, + data_interval=info.data_interval, + start_date=timezone.utcnow(), + state=DagRunState.QUEUED, + external_trigger=False, + conf=dag_run_conf, + run_type=DagRunType.BACKFILL_JOB, + creating_job_id=None, + 
session=session, + backfill_id=backfill_id, + ) + session.add( + BackfillDagRun( + backfill_id=backfill_id, + dag_run_id=dr.id, + sort_ordinal=backfill_sort_ordinal, + logical_date=info.logical_date, + ) + ) + + +def _get_info_list( + *, + from_date, + to_date, + reverse, + dag, +): + infos = dag.iter_dagrun_infos_between(from_date, to_date) + now = timezone.utcnow() + dagrun_info_list = (x for x in infos if x.data_interval.end < now) + if reverse: + dagrun_info_list = reversed([x for x in dag.iter_dagrun_infos_between(from_date, to_date)]) + return dagrun_info_list + + def _create_backfill( *, dag_id: str, @@ -127,16 +247,21 @@ def _create_backfill( max_active_runs: int, reverse: bool, dag_run_conf: dict | None, + reprocess_behavior: ReprocessBehavior | None = None, ) -> Backfill | None: + from airflow.models import DagModel from airflow.models.serialized_dag import SerializedDagModel with create_session() as session: serdag = session.get(SerializedDagModel, dag_id) if not serdag: raise NotFound(f"Could not find dag {dag_id}") - + # todo: if dag has no schedule, raise num_active = session.scalar( - select(func.count()).where(Backfill.dag_id == dag_id, Backfill.completed_at.is_(None)) + select(func.count()).where( + Backfill.dag_id == dag_id, + Backfill.completed_at.is_(None), + ) ) if num_active > 0: raise AlreadyRunningBackfill( @@ -144,89 +269,66 @@ def _create_backfill( f"There can be only one running backfill per dag." ) + dag = serdag.dag + depends_on_past = any(x.depends_on_past for x in dag.tasks) + if depends_on_past: + if reverse is True: + raise ValueError( + "Backfill cannot be run in reverse when the dag has tasks where depends_on_past=True" + ) + if reprocess_behavior in (None, ReprocessBehavior.NONE): + raise ValueError( + "Dag has task for which depends_on_past is true. 
" + "You must set reprocess behavior to reprocess completed or " + "reprocess failed" + ) br = Backfill( dag_id=dag_id, from_date=from_date, to_date=to_date, max_active_runs=max_active_runs, dag_run_conf=dag_run_conf, + reprocess_behavior=reprocess_behavior, ) session.add(br) session.commit() - dag = serdag.dag - depends_on_past = any(x.depends_on_past for x in dag.tasks) - if depends_on_past: - if reverse is True: - raise ValueError( - "Backfill cannot be run in reverse when the dag has tasks where depends_on_past=True" - ) - backfill_sort_ordinal = 0 - dagrun_info_list = dag.iter_dagrun_infos_between(from_date, to_date) - if reverse: - dagrun_info_list = reversed([x for x in dag.iter_dagrun_infos_between(from_date, to_date)]) - for info in dagrun_info_list: - backfill_sort_ordinal += 1 - log.info("creating backfill dag run %s dag_id=%s backfill_id=%s, info=", dag.dag_id, br.id, info) - dr = None - try: - dr = dag.create_dagrun( - triggered_by=DagRunTriggeredByType.BACKFILL, - execution_date=info.logical_date, - data_interval=info.data_interval, - start_date=timezone.utcnow(), - state=DagRunState.QUEUED, - external_trigger=False, - conf=br.dag_run_conf, - run_type=DagRunType.BACKFILL_JOB, - creating_job_id=None, - session=session, - backfill_id=br.id, - ) - except Exception: - dag.log.exception( - "Error while attempting to create a dag run dag_id='%s' logical_date='%s'", - dag.dag_id, - info.logical_date, - ) - session.rollback() - session.add( - BackfillDagRun( - backfill_id=br.id, - dag_run_id=dr.id if dr else None, # this means we failed to create the dag run - sort_ordinal=backfill_sort_ordinal, - ) - ) - session.commit() - return br - - -def _cancel_backfill(backfill_id) -> Backfill: - with create_session() as session: - b: Backfill = session.get(Backfill, backfill_id) - if b.completed_at is not None: - raise Conflict("Backfill is already completed.") - - b.completed_at = timezone.utcnow() - - # first, pause - if not b.is_paused: - b.is_paused = True - - session.commit() - from airflow.models import DagRun + dagrun_info_list = _get_info_list( + from_date=from_date, + to_date=to_date, + reverse=reverse, + dag=dag, + ) - # now, let's mark all queued dag runs as failed - query = ( - update(DagRun) - .where( - DagRun.id.in_(select(BackfillDagRun.dag_run_id).where(BackfillDagRun.backfill_id == b.id)), - DagRun.state == DagRunState.QUEUED, + log.info("obtaining lock on dag %s", dag_id) + # we obtain a lock on dag model so that nothing else will create + # dag runs at the same time. mainly this is required by non-uniqueness + # of logical_date. otherwise we could just create run in a try-except. 
+ dag_model = session.scalar( + with_row_locks( + select(DagModel).where(DagModel.dag_id == dag_id), + session=session, ) - .values(state=DagRunState.FAILED) - .execution_options(synchronize_session=False) ) - session.execute(query) - return b + if not dag_model: + raise RuntimeError(f"Dag {dag_id} not found") + for info in dagrun_info_list: + backfill_sort_ordinal += 1 + _create_backfill_dag_run( + dag=dag, + info=info, + backfill_id=br.id, + dag_run_conf=br.dag_run_conf, + reprocess_behavior=br.reprocess_behavior, + backfill_sort_ordinal=backfill_sort_ordinal, + session=session, + ) + log.info( + "created backfill dag run dag_id=%s backfill_id=%s, info=%s", + dag.dag_id, + br.id, + info, + ) + return br diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 514553e05a2dd..c1448ef9cc550 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -23,17 +23,13 @@ from __future__ import annotations -import abc import collections.abc import contextlib import copy import functools -import inspect import logging -import sys -import warnings from datetime import datetime, timedelta -from functools import total_ordering, wraps +from functools import wraps from threading import local from types import FunctionType from typing import ( @@ -45,10 +41,9 @@ NoReturn, Sequence, TypeVar, - cast, ) -import attr +import methodtools import pendulum from sqlalchemy import select from sqlalchemy.orm.exc import NoResultFound @@ -56,7 +51,6 @@ from airflow.configuration import conf from airflow.exceptions import ( AirflowException, - FailStopDagInvalidTriggerRule, TaskDeferralError, TaskDeferred, ) @@ -78,12 +72,17 @@ ) from airflow.models.base import _sentinel from airflow.models.mappedoperator import OperatorPartial, validate_mapping_kwargs -from airflow.models.param import ParamsDict -from airflow.models.pool import Pool from airflow.models.taskinstance import TaskInstance, clear_task_instances from airflow.models.taskmixin import DependencyMixin + +# Keeping this file at all is a temp thing as we migrate the repo to the task sdk as the base, but to keep +# main working and useful for others to develop against we use the TaskSDK here but keep this file around +from airflow.sdk import DAG, BaseOperator as TaskSDKBaseOperator, EdgeModifier as TaskSDKEdgeModifier +from airflow.sdk.definitions.baseoperator import ( + BaseOperatorMeta as TaskSDKBaseOperatorMeta, + get_merged_defaults, +) from airflow.serialization.enums import DagAttributeTypes -from airflow.task.priority_strategy import PriorityWeightStrategy, validate_and_load_priority_weight_strategy from airflow.ti_deps.deps.mapped_task_upstream_dep import MappedTaskUpstreamDep from airflow.ti_deps.deps.not_in_retry_period_dep import NotInRetryPeriodDep from airflow.ti_deps.deps.not_previously_skipped_dep import NotPreviouslySkippedDep @@ -91,15 +90,11 @@ from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep from airflow.utils import timezone from airflow.utils.context import Context, context_get_outlet_events -from airflow.utils.decorators import fixup_decorator_warning_stack from airflow.utils.edgemodifier import EdgeModifier -from airflow.utils.helpers import validate_instance_args, validate_key from airflow.utils.operator_helpers import ExecutionCallableRunner from airflow.utils.operator_resources import Resources from airflow.utils.session import NEW_SESSION, provide_session -from airflow.utils.setup_teardown import SetupTeardownContext -from airflow.utils.trigger_rule import TriggerRule 
-from airflow.utils.types import NOTSET, AttributeRemoved, DagRunTriggeredByType +from airflow.utils.types import NOTSET, DagRunTriggeredByType from airflow.utils.xcom import XCOM_RETURN_KEY if TYPE_CHECKING: @@ -110,14 +105,18 @@ from airflow.models.abstractoperator import TaskStateChangeCallback from airflow.models.baseoperatorlink import BaseOperatorLink - from airflow.models.dag import DAG + from airflow.models.dag import DAG as SchedulerDAG from airflow.models.operator import Operator - from airflow.models.xcom_arg import XComArg + from airflow.task.priority_strategy import PriorityWeightStrategy from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.triggers.base import BaseTrigger, StartTriggerArgs - from airflow.utils.task_group import TaskGroup from airflow.utils.types import ArgNotSet + +# Todo: AIP-44: Once we get rid of AIP-44 we can remove this. But without this here pydantic fails to resolve +# types for serialization +from airflow.utils.task_group import TaskGroup # noqa: TCH001 + TaskPreExecuteHook = Callable[[Context], None] TaskPostExecuteHook = Callable[[Context, Any], None] @@ -139,10 +138,12 @@ def parse_retries(retries: Any) -> int | None: return parsed_retries -def coerce_timedelta(value: float | timedelta, *, key: str) -> timedelta: +def coerce_timedelta(value: float | timedelta, *, key: str | None = None) -> timedelta: if isinstance(value, timedelta): return value - logger.debug("%s isn't a timedelta object, assuming secs", key) + # TODO: remove this log here + if key: + logger.debug("%s isn't a timedelta object, assuming secs", key) return timedelta(seconds=value) @@ -152,38 +153,6 @@ def coerce_resources(resources: dict[str, Any] | None) -> Resources | None: return Resources(**resources) -def _get_parent_defaults(dag: DAG | None, task_group: TaskGroup | None) -> tuple[dict, ParamsDict]: - if not dag: - return {}, ParamsDict() - dag_args = copy.copy(dag.default_args) - dag_params = copy.deepcopy(dag.params) - if task_group: - if task_group.default_args and not isinstance(task_group.default_args, collections.abc.Mapping): - raise TypeError("default_args must be a mapping") - dag_args.update(task_group.default_args) - return dag_args, dag_params - - -def get_merged_defaults( - dag: DAG | None, - task_group: TaskGroup | None, - task_params: collections.abc.MutableMapping | None, - task_default_args: dict | None, -) -> tuple[dict, ParamsDict]: - args, params = _get_parent_defaults(dag, task_group) - if task_params: - if not isinstance(task_params, collections.abc.Mapping): - raise TypeError("params must be a mapping") - params.update(task_params) - if task_default_args: - if not isinstance(task_default_args, collections.abc.Mapping): - raise TypeError("default_args must be a mapping") - args.update(task_default_args) - with contextlib.suppress(KeyError): - params.update(task_default_args["params"] or {}) - return args, params - - class _PartialDescriptor: """A descriptor that guards against ``.partial`` being called on Task objects.""" @@ -225,161 +194,150 @@ def partial(**kwargs): # This is what handles the actual mapping. 
-def partial( - operator_class: type[BaseOperator], - *, - task_id: str, - dag: DAG | None = None, - task_group: TaskGroup | None = None, - start_date: datetime | ArgNotSet = NOTSET, - end_date: datetime | ArgNotSet = NOTSET, - owner: str | ArgNotSet = NOTSET, - email: None | str | Iterable[str] | ArgNotSet = NOTSET, - params: collections.abc.MutableMapping | None = None, - resources: dict[str, Any] | None | ArgNotSet = NOTSET, - trigger_rule: str | ArgNotSet = NOTSET, - depends_on_past: bool | ArgNotSet = NOTSET, - ignore_first_depends_on_past: bool | ArgNotSet = NOTSET, - wait_for_past_depends_before_skipping: bool | ArgNotSet = NOTSET, - wait_for_downstream: bool | ArgNotSet = NOTSET, - retries: int | None | ArgNotSet = NOTSET, - queue: str | ArgNotSet = NOTSET, - pool: str | ArgNotSet = NOTSET, - pool_slots: int | ArgNotSet = NOTSET, - execution_timeout: timedelta | None | ArgNotSet = NOTSET, - max_retry_delay: None | timedelta | float | ArgNotSet = NOTSET, - retry_delay: timedelta | float | ArgNotSet = NOTSET, - retry_exponential_backoff: bool | ArgNotSet = NOTSET, - priority_weight: int | ArgNotSet = NOTSET, - weight_rule: str | PriorityWeightStrategy | ArgNotSet = NOTSET, - sla: timedelta | None | ArgNotSet = NOTSET, - map_index_template: str | None | ArgNotSet = NOTSET, - max_active_tis_per_dag: int | None | ArgNotSet = NOTSET, - max_active_tis_per_dagrun: int | None | ArgNotSet = NOTSET, - on_execute_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] | ArgNotSet = NOTSET, - on_failure_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] | ArgNotSet = NOTSET, - on_success_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] | ArgNotSet = NOTSET, - on_retry_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] | ArgNotSet = NOTSET, - on_skipped_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] | ArgNotSet = NOTSET, - run_as_user: str | None | ArgNotSet = NOTSET, - executor: str | None | ArgNotSet = NOTSET, - executor_config: dict | None | ArgNotSet = NOTSET, - inlets: Any | None | ArgNotSet = NOTSET, - outlets: Any | None | ArgNotSet = NOTSET, - doc: str | None | ArgNotSet = NOTSET, - doc_md: str | None | ArgNotSet = NOTSET, - doc_json: str | None | ArgNotSet = NOTSET, - doc_yaml: str | None | ArgNotSet = NOTSET, - doc_rst: str | None | ArgNotSet = NOTSET, - task_display_name: str | None | ArgNotSet = NOTSET, - logger_name: str | None | ArgNotSet = NOTSET, - allow_nested_operators: bool = True, - **kwargs, -) -> OperatorPartial: - from airflow.models.dag import DagContext - from airflow.utils.task_group import TaskGroupContext - - validate_mapping_kwargs(operator_class, "partial", kwargs) - - dag = dag or DagContext.get_current_dag() - if dag: - task_group = task_group or TaskGroupContext.get_current_task_group(dag) - if task_group: - task_id = task_group.child_id(task_id) - - # Merge DAG and task group level defaults into user-supplied values. 
- dag_default_args, partial_params = get_merged_defaults( - dag=dag, - task_group=task_group, - task_params=params, - task_default_args=kwargs.pop("default_args", None), - ) - # Create partial_kwargs from args and kwargs - partial_kwargs: dict[str, Any] = { +if TYPE_CHECKING: + + def partial( + operator_class: type[BaseOperator], + *, + task_id: str, + dag: DAG | None = None, + task_group: TaskGroup | None = None, + start_date: datetime | ArgNotSet = NOTSET, + end_date: datetime | ArgNotSet = NOTSET, + owner: str | ArgNotSet = NOTSET, + email: None | str | Iterable[str] | ArgNotSet = NOTSET, + params: collections.abc.MutableMapping | None = None, + resources: dict[str, Any] | None | ArgNotSet = NOTSET, + trigger_rule: str | ArgNotSet = NOTSET, + depends_on_past: bool | ArgNotSet = NOTSET, + ignore_first_depends_on_past: bool | ArgNotSet = NOTSET, + wait_for_past_depends_before_skipping: bool | ArgNotSet = NOTSET, + wait_for_downstream: bool | ArgNotSet = NOTSET, + retries: int | None | ArgNotSet = NOTSET, + queue: str | ArgNotSet = NOTSET, + pool: str | ArgNotSet = NOTSET, + pool_slots: int | ArgNotSet = NOTSET, + execution_timeout: timedelta | None | ArgNotSet = NOTSET, + max_retry_delay: None | timedelta | float | ArgNotSet = NOTSET, + retry_delay: timedelta | float | ArgNotSet = NOTSET, + retry_exponential_backoff: bool | ArgNotSet = NOTSET, + priority_weight: int | ArgNotSet = NOTSET, + weight_rule: str | PriorityWeightStrategy | ArgNotSet = NOTSET, + sla: timedelta | None | ArgNotSet = NOTSET, + map_index_template: str | None | ArgNotSet = NOTSET, + max_active_tis_per_dag: int | None | ArgNotSet = NOTSET, + max_active_tis_per_dagrun: int | None | ArgNotSet = NOTSET, + on_execute_callback: None + | TaskStateChangeCallback + | list[TaskStateChangeCallback] + | ArgNotSet = NOTSET, + on_failure_callback: None + | TaskStateChangeCallback + | list[TaskStateChangeCallback] + | ArgNotSet = NOTSET, + on_success_callback: None + | TaskStateChangeCallback + | list[TaskStateChangeCallback] + | ArgNotSet = NOTSET, + on_retry_callback: None + | TaskStateChangeCallback + | list[TaskStateChangeCallback] + | ArgNotSet = NOTSET, + on_skipped_callback: None + | TaskStateChangeCallback + | list[TaskStateChangeCallback] + | ArgNotSet = NOTSET, + run_as_user: str | None | ArgNotSet = NOTSET, + executor: str | None | ArgNotSet = NOTSET, + executor_config: dict | None | ArgNotSet = NOTSET, + inlets: Any | None | ArgNotSet = NOTSET, + outlets: Any | None | ArgNotSet = NOTSET, + doc: str | None | ArgNotSet = NOTSET, + doc_md: str | None | ArgNotSet = NOTSET, + doc_json: str | None | ArgNotSet = NOTSET, + doc_yaml: str | None | ArgNotSet = NOTSET, + doc_rst: str | None | ArgNotSet = NOTSET, + task_display_name: str | None | ArgNotSet = NOTSET, + logger_name: str | None | ArgNotSet = NOTSET, + allow_nested_operators: bool = True, + **kwargs, + ) -> OperatorPartial: ... 
+else: + + def partial( + operator_class: type[BaseOperator], + *, + task_id: str, + dag: DAG | None = None, + task_group: TaskGroup | None = None, + params: collections.abc.MutableMapping | None = None, **kwargs, - "dag": dag, - "task_group": task_group, - "task_id": task_id, - "map_index_template": map_index_template, - "start_date": start_date, - "end_date": end_date, - "owner": owner, - "email": email, - "trigger_rule": trigger_rule, - "depends_on_past": depends_on_past, - "ignore_first_depends_on_past": ignore_first_depends_on_past, - "wait_for_past_depends_before_skipping": wait_for_past_depends_before_skipping, - "wait_for_downstream": wait_for_downstream, - "retries": retries, - "queue": queue, - "pool": pool, - "pool_slots": pool_slots, - "execution_timeout": execution_timeout, - "max_retry_delay": max_retry_delay, - "retry_delay": retry_delay, - "retry_exponential_backoff": retry_exponential_backoff, - "priority_weight": priority_weight, - "weight_rule": weight_rule, - "sla": sla, - "max_active_tis_per_dag": max_active_tis_per_dag, - "max_active_tis_per_dagrun": max_active_tis_per_dagrun, - "on_execute_callback": on_execute_callback, - "on_failure_callback": on_failure_callback, - "on_retry_callback": on_retry_callback, - "on_success_callback": on_success_callback, - "on_skipped_callback": on_skipped_callback, - "run_as_user": run_as_user, - "executor": executor, - "executor_config": executor_config, - "inlets": inlets, - "outlets": outlets, - "resources": resources, - "doc": doc, - "doc_json": doc_json, - "doc_md": doc_md, - "doc_rst": doc_rst, - "doc_yaml": doc_yaml, - "task_display_name": task_display_name, - "logger_name": logger_name, - "allow_nested_operators": allow_nested_operators, - } - - # Inject DAG-level default args into args provided to this function. - partial_kwargs.update((k, v) for k, v in dag_default_args.items() if partial_kwargs.get(k) is NOTSET) - - # Fill fields not provided by the user with default values. - partial_kwargs = {k: _PARTIAL_DEFAULTS.get(k) if v is NOTSET else v for k, v in partial_kwargs.items()} - - # Post-process arguments. Should be kept in sync with _TaskDecorator.expand(). - if "task_concurrency" in kwargs: # Reject deprecated option. - raise TypeError("unexpected argument: task_concurrency") - if partial_kwargs["wait_for_downstream"]: - partial_kwargs["depends_on_past"] = True - partial_kwargs["start_date"] = timezone.convert_to_utc(partial_kwargs["start_date"]) - partial_kwargs["end_date"] = timezone.convert_to_utc(partial_kwargs["end_date"]) - if partial_kwargs["pool"] is None: - partial_kwargs["pool"] = Pool.DEFAULT_POOL_NAME - if partial_kwargs["pool_slots"] < 1: - dag_str = "" + ): + from airflow.sdk.definitions.contextmanager import DagContext, TaskGroupContext + + validate_mapping_kwargs(operator_class, "partial", kwargs) + + dag = dag or DagContext.get_current() if dag: - dag_str = f" in dag {dag.dag_id}" - raise ValueError(f"pool slots for {task_id}{dag_str} cannot be less than 1") - partial_kwargs["retries"] = parse_retries(partial_kwargs["retries"]) - partial_kwargs["retry_delay"] = coerce_timedelta(partial_kwargs["retry_delay"], key="retry_delay") - if partial_kwargs["max_retry_delay"] is not None: - partial_kwargs["max_retry_delay"] = coerce_timedelta( - partial_kwargs["max_retry_delay"], - key="max_retry_delay", + task_group = task_group or TaskGroupContext.get_current(dag) + if task_group: + task_id = task_group.child_id(task_id) + + # Merge DAG and task group level defaults into user-supplied values. 
+ dag_default_args, partial_params = get_merged_defaults( + dag=dag, + task_group=task_group, + task_params=params, + task_default_args=kwargs.pop("default_args", None), ) - partial_kwargs["executor_config"] = partial_kwargs["executor_config"] or {} - partial_kwargs["resources"] = coerce_resources(partial_kwargs["resources"]) - return OperatorPartial( - operator_class=operator_class, - kwargs=partial_kwargs, - params=partial_params, - ) + # Create partial_kwargs from args and kwargs + partial_kwargs: dict[str, Any] = { + "task_id": task_id, + "dag": dag, + "task_group": task_group, + **kwargs, + } + + # Inject DAG-level default args into args provided to this function. + partial_kwargs.update( + (k, v) for k, v in dag_default_args.items() if partial_kwargs.get(k, NOTSET) is NOTSET + ) + + # Fill fields not provided by the user with default values. + for k, v in _PARTIAL_DEFAULTS.items(): + partial_kwargs.setdefault(k, v) + + # Post-process arguments. Should be kept in sync with _TaskDecorator.expand(). + if "task_concurrency" in kwargs: # Reject deprecated option. + raise TypeError("unexpected argument: task_concurrency") + if wait := partial_kwargs.get("wait_for_downstream", False): + partial_kwargs["depends_on_past"] = wait + if start_date := partial_kwargs.get("start_date", None): + partial_kwargs["start_date"] = timezone.convert_to_utc(start_date) + if end_date := partial_kwargs.get("end_date", None): + partial_kwargs["end_date"] = timezone.convert_to_utc(end_date) + if partial_kwargs["pool_slots"] < 1: + dag_str = "" + if dag: + dag_str = f" in dag {dag.dag_id}" + raise ValueError(f"pool slots for {task_id}{dag_str} cannot be less than 1") + if retries := partial_kwargs.get("retries"): + partial_kwargs["retries"] = parse_retries(retries) + partial_kwargs["retry_delay"] = coerce_timedelta(partial_kwargs["retry_delay"], key="retry_delay") + if partial_kwargs.get("max_retry_delay", None) is not None: + partial_kwargs["max_retry_delay"] = coerce_timedelta( + partial_kwargs["max_retry_delay"], + key="max_retry_delay", + ) + partial_kwargs.setdefault("executor_config", {}) + + return OperatorPartial( + operator_class=operator_class, + kwargs=partial_kwargs, + params=partial_params, + ) class ExecutorSafeguard: @@ -419,103 +377,9 @@ def wrapper(self, *args, **kwargs): return wrapper -class BaseOperatorMeta(abc.ABCMeta): - """Metaclass of BaseOperator.""" - - @classmethod - def _apply_defaults(cls, func: T) -> T: - """ - Look for an argument named "default_args", and fill the unspecified arguments from it. - - Since python2.* isn't clear about which arguments are missing when - calling a function, and that this can be quite confusing with multi-level - inheritance and argument defaults, this decorator also alerts with - specific information about the missing arguments. - """ - # Cache inspect.signature for the wrapper closure to avoid calling it - # at every decorated invocation. This is separate sig_cache created - # per decoration, i.e. each function decorated using apply_defaults will - # have a different sig_cache. 
- sig_cache = inspect.signature(func) - non_variadic_params = { - name: param - for (name, param) in sig_cache.parameters.items() - if param.name != "self" and param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD) - } - non_optional_args = { - name - for name, param in non_variadic_params.items() - if param.default == param.empty and name != "task_id" - } - - fixup_decorator_warning_stack(func) - - @wraps(func) - def apply_defaults(self: BaseOperator, *args: Any, **kwargs: Any) -> Any: - from airflow.models.dag import DagContext - from airflow.utils.task_group import TaskGroupContext - - if args: - raise AirflowException("Use keyword arguments when initializing operators") - - instantiated_from_mapped = kwargs.pop( - "_airflow_from_mapped", - getattr(self, "_BaseOperator__from_mapped", False), - ) - - dag: DAG | None = kwargs.get("dag") or DagContext.get_current_dag() - task_group: TaskGroup | None = kwargs.get("task_group") - if dag and not task_group: - task_group = TaskGroupContext.get_current_task_group(dag) - - default_args, merged_params = get_merged_defaults( - dag=dag, - task_group=task_group, - task_params=kwargs.pop("params", None), - task_default_args=kwargs.pop("default_args", None), - ) - - for arg in sig_cache.parameters: - if arg not in kwargs and arg in default_args: - kwargs[arg] = default_args[arg] - - missing_args = non_optional_args.difference(kwargs) - if len(missing_args) == 1: - raise AirflowException(f"missing keyword argument {missing_args.pop()!r}") - elif missing_args: - display = ", ".join(repr(a) for a in sorted(missing_args)) - raise AirflowException(f"missing keyword arguments {display}") - - if merged_params: - kwargs["params"] = merged_params - - hook = getattr(self, "_hook_apply_defaults", None) - if hook: - args, kwargs = hook(**kwargs, default_args=default_args) - default_args = kwargs.pop("default_args", {}) - - if not hasattr(self, "_BaseOperator__init_kwargs"): - self._BaseOperator__init_kwargs = {} - self._BaseOperator__from_mapped = instantiated_from_mapped - - result = func(self, **kwargs, default_args=default_args) - - # Store the args passed to init -- we need them to support task.map serialization! - self._BaseOperator__init_kwargs.update(kwargs) # type: ignore - - # Set upstream task defined by XComArgs passed to template fields of the operator. - # BUT: only do this _ONCE_, not once for each class in the hierarchy - if not instantiated_from_mapped and func == self.__init__.__wrapped__: # type: ignore[misc] - self.set_xcomargs_dependencies() - # Mark instance as instantiated. - self._BaseOperator__instantiated = True - - return result - - apply_defaults.__non_optional_args = non_optional_args # type: ignore - apply_defaults.__param_names = set(non_variadic_params) # type: ignore - - return cast(T, apply_defaults) +# TODO: Task-SDK - temporarily extend the metaclass to add in the ExecutorSafeguard. +class BaseOperatorMeta(TaskSDKBaseOperatorMeta): + """:meta private:""" # noqa: D400 def __new__(cls, name, bases, namespace, **kwargs): execute_method = namespace.get("execute") @@ -528,57 +392,10 @@ def __new__(cls, name, bases, namespace, **kwargs): partial_desc = vars(new_cls)["partial"] if isinstance(partial_desc, _PartialDescriptor): partial_desc.class_method = classmethod(partial) - - # We patch `__init__` only if the class defines it. 
- if inspect.getmro(new_cls)[1].__init__ is not new_cls.__init__: - new_cls.__init__ = cls._apply_defaults(new_cls.__init__) - return new_cls -# TODO: The following mapping is used to validate that the arguments passed to the BaseOperator are of the -# correct type. This is a temporary solution until we find a more sophisticated method for argument -# validation. One potential method is to use `get_type_hints` from the typing module. However, this is not -# fully compatible with future annotations for Python versions below 3.10. Once we require a minimum Python -# version that supports `get_type_hints` effectively or find a better approach, we can replace this -# manual type-checking method. -BASEOPERATOR_ARGS_EXPECTED_TYPES = { - "task_id": str, - "email": (str, Iterable), - "email_on_retry": bool, - "email_on_failure": bool, - "retries": int, - "retry_exponential_backoff": bool, - "depends_on_past": bool, - "ignore_first_depends_on_past": bool, - "wait_for_past_depends_before_skipping": bool, - "wait_for_downstream": bool, - "priority_weight": int, - "queue": str, - "pool": str, - "pool_slots": int, - "trigger_rule": str, - "run_as_user": str, - "task_concurrency": int, - "map_index_template": str, - "max_active_tis_per_dag": int, - "max_active_tis_per_dagrun": int, - "executor": str, - "do_xcom_push": bool, - "multiple_outputs": bool, - "doc": str, - "doc_md": str, - "doc_json": str, - "doc_yaml": str, - "doc_rst": str, - "task_display_name": str, - "logger_name": str, - "allow_nested_operators": bool, -} - - -@total_ordering -class BaseOperator(AbstractOperator, metaclass=BaseOperatorMeta): +class BaseOperator(TaskSDKBaseOperator, AbstractOperator, metaclass=BaseOperatorMeta): r""" Abstract base class for all operators. @@ -783,401 +600,71 @@ def say_hello_world(**context): hello_world_task.execute(context) """ - # Implementing Operator. - template_fields: Sequence[str] = () - template_ext: Sequence[str] = () - - template_fields_renderers: dict[str, str] = {} - - # Defines the color in the UI - ui_color: str = "#fff" - ui_fgcolor: str = "#000" - - pool: str = "" - - # base list which includes all the attrs that don't need deep copy. - _base_operator_shallow_copy_attrs: tuple[str, ...] = ( - "user_defined_macros", - "user_defined_filters", - "params", - ) - - # each operator should override this class attr for shallow copy attrs. - shallow_copy_attrs: Sequence[str] = () - - # Defines the operator level extra links - operator_extra_links: Collection[BaseOperatorLink] = () - - # The _serialized_fields are lazily loaded when get_serialized_fields() method is called - __serialized_fields: frozenset[str] | None = None - - partial: Callable[..., OperatorPartial] = _PartialDescriptor() # type: ignore - - _comps = { - "task_id", - "dag_id", - "owner", - "email", - "email_on_retry", - "retry_delay", - "retry_exponential_backoff", - "max_retry_delay", - "start_date", - "end_date", - "depends_on_past", - "wait_for_downstream", - "priority_weight", - "sla", - "execution_timeout", - "on_execute_callback", - "on_failure_callback", - "on_success_callback", - "on_retry_callback", - "on_skipped_callback", - "do_xcom_push", - "multiple_outputs", - "allow_nested_operators", - "executor", - } - - # Defines if the operator supports lineage without manual definitions - supports_lineage = False - - # If True then the class constructor was called - __instantiated = False - # List of args as passed to `init()`, after apply_defaults() has been updated. 
Used to "recreate" the task - # when mapping - __init_kwargs: dict[str, Any] - - # Set to True before calling execute method - _lock_for_execution = False - - _dag: DAG | None = None - task_group: TaskGroup | None = None - - start_date: pendulum.DateTime | None = None - end_date: pendulum.DateTime | None = None - - # Set to True for an operator instantiated by a mapped operator. - __from_mapped = False - start_trigger_args: StartTriggerArgs | None = None start_from_trigger: bool = False + on_execute_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None + on_failure_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None + on_success_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None + on_retry_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None + on_skipped_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None + def __init__( self, - task_id: str, - owner: str = DEFAULT_OWNER, - email: str | Iterable[str] | None = None, - email_on_retry: bool = conf.getboolean("email", "default_email_on_retry", fallback=True), - email_on_failure: bool = conf.getboolean("email", "default_email_on_failure", fallback=True), - retries: int | None = DEFAULT_RETRIES, - retry_delay: timedelta | float = DEFAULT_RETRY_DELAY, - retry_exponential_backoff: bool = False, - max_retry_delay: timedelta | float | None = None, - start_date: datetime | None = None, - end_date: datetime | None = None, - depends_on_past: bool = False, - ignore_first_depends_on_past: bool = DEFAULT_IGNORE_FIRST_DEPENDS_ON_PAST, - wait_for_past_depends_before_skipping: bool = DEFAULT_WAIT_FOR_PAST_DEPENDS_BEFORE_SKIPPING, - wait_for_downstream: bool = False, - dag: DAG | None = None, - params: collections.abc.MutableMapping | None = None, - default_args: dict | None = None, - priority_weight: int = DEFAULT_PRIORITY_WEIGHT, - weight_rule: str | PriorityWeightStrategy = DEFAULT_WEIGHT_RULE, - queue: str = DEFAULT_QUEUE, - pool: str | None = None, - pool_slots: int = DEFAULT_POOL_SLOTS, - sla: timedelta | None = None, - execution_timeout: timedelta | None = DEFAULT_TASK_EXECUTION_TIMEOUT, + pre_execute=None, + post_execute=None, on_execute_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None, on_failure_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None, on_success_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None, on_retry_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None, on_skipped_callback: None | TaskStateChangeCallback | list[TaskStateChangeCallback] = None, - pre_execute: TaskPreExecuteHook | None = None, - post_execute: TaskPostExecuteHook | None = None, - trigger_rule: str = DEFAULT_TRIGGER_RULE, - resources: dict[str, Any] | None = None, - run_as_user: str | None = None, - map_index_template: str | None = None, - max_active_tis_per_dag: int | None = None, - max_active_tis_per_dagrun: int | None = None, - executor: str | None = None, - executor_config: dict | None = None, - do_xcom_push: bool = True, - multiple_outputs: bool = False, - inlets: Any | None = None, - outlets: Any | None = None, - task_group: TaskGroup | None = None, - doc: str | None = None, - doc_md: str | None = None, - doc_json: str | None = None, - doc_yaml: str | None = None, - doc_rst: str | None = None, - task_display_name: str | None = None, - logger_name: str | None = None, - allow_nested_operators: bool = True, **kwargs, 
): - from airflow.models.dag import DagContext - from airflow.utils.task_group import TaskGroupContext - - self.__init_kwargs = {} - - super().__init__() - - kwargs.pop("_airflow_mapped_validation_only", None) - if kwargs: - raise AirflowException( - f"Invalid arguments were passed to {self.__class__.__name__} (task_id: {task_id}). " - f"Invalid arguments were:\n**kwargs: {kwargs}", - ) - validate_key(task_id) - - dag = dag or DagContext.get_current_dag() - task_group = task_group or TaskGroupContext.get_current_task_group(dag) - - self.task_id = task_group.child_id(task_id) if task_group else task_id - if not self.__from_mapped and task_group: - task_group.add(self) - - self.owner = owner - self.email = email - self.email_on_retry = email_on_retry - self.email_on_failure = email_on_failure - - if execution_timeout is not None and not isinstance(execution_timeout, timedelta): - raise ValueError( - f"execution_timeout must be timedelta object but passed as type: {type(execution_timeout)}" - ) - self.execution_timeout = execution_timeout + if start_date := kwargs.get("start_date", None): + kwargs["start_date"] = timezone.convert_to_utc(start_date) + if end_date := kwargs.get("end_date", None): + kwargs["end_date"] = timezone.convert_to_utc(end_date) + super().__init__(**kwargs) + self._pre_execute_hook = pre_execute + self._post_execute_hook = post_execute self.on_execute_callback = on_execute_callback self.on_failure_callback = on_failure_callback self.on_success_callback = on_success_callback - self.on_retry_callback = on_retry_callback self.on_skipped_callback = on_skipped_callback - self._pre_execute_hook = pre_execute - self._post_execute_hook = post_execute - - if start_date and not isinstance(start_date, datetime): - self.log.warning("start_date for %s isn't datetime.datetime", self) - elif start_date: - self.start_date = timezone.convert_to_utc(start_date) - - if end_date: - self.end_date = timezone.convert_to_utc(end_date) - - self.executor = executor - self.executor_config = executor_config or {} - self.run_as_user = run_as_user - self.retries = parse_retries(retries) - self.queue = queue - self.pool = Pool.DEFAULT_POOL_NAME if pool is None else pool - self.pool_slots = pool_slots - if self.pool_slots < 1: - dag_str = f" in dag {dag.dag_id}" if dag else "" - raise ValueError(f"pool slots for {self.task_id}{dag_str} cannot be less than 1") - - if sla: - self.log.warning( - "The SLA feature is removed in Airflow 3.0, to be replaced with a new implementation in 3.1" - ) - - if not TriggerRule.is_valid(trigger_rule): - raise AirflowException( - f"The trigger_rule must be one of {TriggerRule.all_triggers()}," - f"'{dag.dag_id if dag else ''}.{task_id}'; received '{trigger_rule}'." 
- ) - - self.trigger_rule: TriggerRule = TriggerRule(trigger_rule) - FailStopDagInvalidTriggerRule.check(dag=dag, trigger_rule=self.trigger_rule) - - self.depends_on_past: bool = depends_on_past - self.ignore_first_depends_on_past: bool = ignore_first_depends_on_past - self.wait_for_past_depends_before_skipping: bool = wait_for_past_depends_before_skipping - self.wait_for_downstream: bool = wait_for_downstream - if wait_for_downstream: - self.depends_on_past = True - - self.retry_delay = coerce_timedelta(retry_delay, key="retry_delay") - self.retry_exponential_backoff = retry_exponential_backoff - self.max_retry_delay = ( - max_retry_delay - if max_retry_delay is None - else coerce_timedelta(max_retry_delay, key="max_retry_delay") - ) - - # At execution_time this becomes a normal dict - self.params: ParamsDict | dict = ParamsDict(params) - if priority_weight is not None and not isinstance(priority_weight, int): - raise AirflowException( - f"`priority_weight` for task '{self.task_id}' only accepts integers, " - f"received '{type(priority_weight)}'." - ) - self.priority_weight = priority_weight - self.weight_rule = validate_and_load_priority_weight_strategy(weight_rule) - self.resources = coerce_resources(resources) - self.max_active_tis_per_dag: int | None = max_active_tis_per_dag - self.max_active_tis_per_dagrun: int | None = max_active_tis_per_dagrun - self.do_xcom_push: bool = do_xcom_push - self.map_index_template: str | None = map_index_template - self.multiple_outputs: bool = multiple_outputs - - self.doc_md = doc_md - self.doc_json = doc_json - self.doc_yaml = doc_yaml - self.doc_rst = doc_rst - self.doc = doc - # Populate the display field only if provided and different from task id - self._task_display_property_value = ( - task_display_name if task_display_name and task_display_name != task_id else None - ) - - self.upstream_task_ids: set[str] = set() - self.downstream_task_ids: set[str] = set() - - if dag: - self.dag = dag - - self._log_config_logger_name = "airflow.task.operators" - self._logger_name = logger_name - self.allow_nested_operators: bool = allow_nested_operators - - # Lineage - self.inlets: list = [] - self.outlets: list = [] - - if inlets: - self.inlets = ( - inlets - if isinstance(inlets, list) - else [ - inlets, - ] - ) - - if outlets: - self.outlets = ( - outlets - if isinstance(outlets, list) - else [ - outlets, - ] - ) - - if isinstance(self.template_fields, str): - warnings.warn( - f"The `template_fields` value for {self.task_type} is a string " - "but should be a list or tuple of string. Wrapping it in a list for execution. " - f"Please update {self.task_type} accordingly.", - UserWarning, - stacklevel=2, - ) - self.template_fields = [self.template_fields] - - self._is_setup = False - self._is_teardown = False - if SetupTeardownContext.active: - SetupTeardownContext.update_context_map(self) - - validate_instance_args(self, BASEOPERATOR_ARGS_EXPECTED_TYPES) - - def __eq__(self, other): - if type(self) is type(other): - # Use getattr() instead of __dict__ as __dict__ doesn't return - # correct values for properties. 
- return all(getattr(self, c, None) == getattr(other, c, None) for c in self._comps) - return False - - def __ne__(self, other): - return not self == other - - def __hash__(self): - hash_components = [type(self)] - for component in self._comps: - val = getattr(self, component, None) - try: - hash(val) - hash_components.append(val) - except TypeError: - hash_components.append(repr(val)) - return hash(tuple(hash_components)) - - # including lineage information - def __or__(self, other): - """ - Return [This Operator] | [Operator]. - - The inlets of other will be set to pick up the outlets from this operator. - Other will be set as a downstream task of this operator. - """ - if isinstance(other, BaseOperator): - if not self.outlets and not self.supports_lineage: - raise ValueError("No outlets defined for this operator") - other.add_inlets([self.task_id]) - self.set_downstream(other) - else: - raise TypeError(f"Right hand side ({other}) is not an Operator") - - return self - - # /Composing Operators --------------------------------------------- - - def __gt__(self, other): - """ - Return [Operator] > [Outlet]. - - If other is an attr annotated object it is set as an outlet of this Operator. - """ - if not isinstance(other, Iterable): - other = [other] - - for obj in other: - if not attr.has(obj): - raise TypeError(f"Left hand side ({obj}) is not an outlet") - self.add_outlets(other) + self.on_retry_callback = on_retry_callback - return self + # Defines the operator level extra links + operator_extra_links: Collection[BaseOperatorLink] = () - def __lt__(self, other): - """ - Return [Inlet] > [Operator] or [Operator] < [Inlet]. + if TYPE_CHECKING: - If other is an attr annotated object it is set as an inlet to this operator. - """ - if not isinstance(other, Iterable): - other = [other] + @property # type: ignore[override] + def dag(self) -> SchedulerDAG: # type: ignore[override] + return super().dag # type: ignore[return-value] - for obj in other: - if not attr.has(obj): - raise TypeError(f"{obj} cannot be an inlet") - self.add_inlets(other) + @dag.setter + def dag(self, val: SchedulerDAG): + # For type checking only + ... - return self + partial: Callable[..., OperatorPartial] = _PartialDescriptor() # type: ignore - def __setattr__(self, key, value): - super().__setattr__(key, value) - if self.__from_mapped or self._lock_for_execution: - return # Skip any custom behavior for validation and during execute. - if key in self.__init_kwargs: - self.__init_kwargs[key] = value - if self.__instantiated and key in self.template_fields: - # Resolve upstreams set by assigning an XComArg after initializing - # an operator, example: - # op = BashOperator() - # op.bash_command = "sleep 1" - self.set_xcomargs_dependencies() - - def add_inlets(self, inlets: Iterable[Any]): - """Set inlets to this operator.""" - self.inlets.extend(inlets) - - def add_outlets(self, outlets: Iterable[Any]): - """Define the outlets of this operator.""" - self.outlets.extend(outlets) + @classmethod + @methodtools.lru_cache(maxsize=None) + def get_serialized_fields(cls): + """Stringified DAGs and operators contain exactly these fields.""" + # TODO: this ends up caching it once per-subclass, which isn't what we want, but this class is only + # kept around during the development of AIP-72/TaskSDK code. 
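The `TYPE_CHECKING`-only `dag` property override added above narrows the return type to `SchedulerDAG` for static analysis while leaving the SDK base class's runtime behavior untouched. The same trick in isolation, with placeholder class names standing in for the Airflow ones::

    from typing import TYPE_CHECKING


    class SDKDag: ...


    class SchedulerDAG(SDKDag): ...


    class BaseTask:
        def __init__(self) -> None:
            self._dag = SchedulerDAG()

        @property
        def dag(self) -> SDKDag:
            return self._dag


    class SchedulerTask(BaseTask):
        if TYPE_CHECKING:
            # Seen only by the type checker; at runtime BaseTask.dag is inherited unchanged.
            @property  # type: ignore[override]
            def dag(self) -> SchedulerDAG:  # type: ignore[override]
                return super().dag  # type: ignore[return-value]


    assert isinstance(SchedulerTask().dag, SchedulerDAG)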
+ return TaskSDKBaseOperator.get_serialized_fields() | { + "start_trigger_args", + "start_from_trigger", + "on_execute_callback", + "on_failure_callback", + "on_success_callback", + "on_retry_callback", + "on_skipped_callback", + } def get_inlet_defs(self): """ @@ -1195,55 +682,6 @@ def get_outlet_defs(self): """ return self.outlets - def get_dag(self) -> DAG | None: - return self._dag - - @property # type: ignore[override] - def dag(self) -> DAG: # type: ignore[override] - """Returns the Operator's DAG if set, otherwise raises an error.""" - if self._dag: - return self._dag - else: - raise AirflowException(f"Operator {self} has not been assigned to a DAG yet") - - @dag.setter - def dag(self, dag: DAG | None): - """Operators can be assigned to one DAG, one time. Repeat assignments to that same DAG are ok.""" - if dag is None: - self._dag = None - return - - # if set to removed, then just set and exit - if self._dag.__class__ is AttributeRemoved: - self._dag = dag - return - # if setting to removed, then just set and exit - if dag.__class__ is AttributeRemoved: - self._dag = AttributeRemoved("_dag") # type: ignore[assignment] - return - - from airflow.models.dag import DAG - - if not isinstance(dag, DAG): - raise TypeError(f"Expected DAG; received {dag.__class__.__name__}") - elif self.has_dag() and self.dag is not dag: - raise AirflowException(f"The DAG assigned to {self} can not be changed.") - - if self.__from_mapped: - pass # Don't add to DAG -- the mapped task takes the place. - elif dag.task_dict.get(self.task_id) is not self: - dag.add_task(self) - - self._dag = dag - - @property - def task_display_name(self) -> str: - return self._task_display_property_value or self.task_id - - def has_dag(self): - """Return True if the Operator has been assigned to a DAG.""" - return self._dag is not None - deps: frozenset[BaseTIDep] = frozenset( { NotInRetryPeriodDep(), @@ -1265,33 +703,6 @@ def prepare_for_execution(self) -> BaseOperator: other._lock_for_execution = True return other - def set_xcomargs_dependencies(self) -> None: - """ - Resolve upstream dependencies of a task. - - In this way passing an ``XComArg`` as value for a template field - will result in creating upstream relation between two tasks. - - **Example**: :: - - with DAG(...): - generate_content = GenerateContentOperator(task_id="generate_content") - send_email = EmailOperator(..., html_content=generate_content.output) - - # This is equivalent to - with DAG(...): - generate_content = GenerateContentOperator(task_id="generate_content") - send_email = EmailOperator(..., html_content="{{ task_instance.xcom_pull('generate_content') }}") - generate_content >> send_email - - """ - from airflow.models.xcom_arg import XComArg - - for field in self.template_fields: - if hasattr(self, field): - arg = getattr(self, field) - XComArg.apply_upstream_relationship(self, arg) - @prepare_lineage def pre_execute(self, context: Any): """Execute right before self.execute() is called.""" @@ -1328,46 +739,6 @@ def post_execute(self, context: Any, result: Any = None): logger=self.log, ).run(context, result) - def on_kill(self) -> None: - """ - Override this method to clean up subprocesses when a task instance gets killed. - - Any use of the threading, subprocess or multiprocessing module within an - operator needs to be cleaned up, or it will leave ghost processes behind. - """ - - def __deepcopy__(self, memo): - # Hack sorting double chained task lists by task_id to avoid hitting - # max_depth on deepcopy operations. 
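Per the TODO above, the new `get_serialized_fields` simply extends the Task SDK field set with the scheduler-only callback fields and memoizes the result per class via `methodtools.lru_cache`. A self-contained sketch of that caching pattern, mirroring the decorator order used in the hunk (the class names and field sets here are placeholders)::

    import methodtools


    class SDKOperator:
        @classmethod
        def get_serialized_fields(cls) -> frozenset[str]:
            return frozenset({"task_id", "owner", "retries"})


    class SchedulerOperator(SDKOperator):
        @classmethod
        @methodtools.lru_cache(maxsize=None)
        def get_serialized_fields(cls) -> frozenset[str]:
            # Computed once per (sub)class, then served from the cache.
            return SDKOperator.get_serialized_fields() | {"on_failure_callback", "on_success_callback"}


    print(sorted(SchedulerOperator.get_serialized_fields()))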
- sys.setrecursionlimit(5000) # TODO fix this in a better way - - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - - shallow_copy = cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs - - for k, v in self.__dict__.items(): - if k == "_BaseOperator__instantiated": - # Don't set this until the _end_, as it changes behaviour of __setattr__ - continue - if k not in shallow_copy: - setattr(result, k, copy.deepcopy(v, memo)) - else: - setattr(result, k, copy.copy(v)) - result.__instantiated = self.__instantiated - return result - - def __getstate__(self): - state = dict(self.__dict__) - if self._log: - del state["_log"] - - return state - - def __setstate__(self, state): - self.__dict__ = state - def render_template_fields( self, context: Context, @@ -1413,6 +784,12 @@ def clear( qry = qry.where(TaskInstance.task_id.in_(tasks)) results = session.scalars(qry).all() count = len(results) + + if TYPE_CHECKING: + # TODO: Task-SDK: We need to set this to the scheduler DAG until we fully separate scheduling and + # definition code + assert isinstance(self.dag, SchedulerDAG) + clear_task_instances(results, session, dag=self.dag) session.commit() return count @@ -1460,6 +837,10 @@ def run( if TYPE_CHECKING: assert self.start_date + # TODO: Task-SDK: We need to set this to the scheduler DAG until we fully separate scheduling and + # definition code + assert isinstance(self.dag, SchedulerDAG) + start_date = pendulum.instance(start_date or self.start_date) end_date = pendulum.instance(end_date or self.end_date or timezone.utcnow()) @@ -1520,83 +901,6 @@ def get_direct_relatives(self, upstream: bool = False) -> Iterable[Operator]: else: return self.downstream_list - def __repr__(self): - return f"" - - @property - def operator_class(self) -> type[BaseOperator]: # type: ignore[override] - return self.__class__ - - @property - def task_type(self) -> str: - """@property: type of the task.""" - return self.__class__.__name__ - - @property - def operator_name(self) -> str: - """@property: use a more friendly display name for the operator, if set.""" - try: - return self.custom_operator_name # type: ignore - except AttributeError: - return self.task_type - - @property - def roots(self) -> list[BaseOperator]: - """Required by DAGNode.""" - return [self] - - @property - def leaves(self) -> list[BaseOperator]: - """Required by DAGNode.""" - return [self] - - @property - def output(self) -> XComArg: - """Returns reference to XCom pushed by current operator.""" - from airflow.models.xcom_arg import XComArg - - return XComArg(operator=self) - - @property - def is_setup(self) -> bool: - """ - Whether the operator is a setup task. - - :meta private: - """ - return self._is_setup - - @is_setup.setter - def is_setup(self, value: bool) -> None: - """ - Setter for is_setup property. - - :meta private: - """ - if self.is_teardown and value: - raise ValueError(f"Cannot mark task '{self.task_id}' as setup; task is already a teardown.") - self._is_setup = value - - @property - def is_teardown(self) -> bool: - """ - Whether the operator is a teardown task. - - :meta private: - """ - return self._is_teardown - - @is_teardown.setter - def is_teardown(self, value: bool) -> None: - """ - Setter for is_teardown property. 
- - :meta private: - """ - if self.is_setup and value: - raise ValueError(f"Cannot mark task '{self.task_id}' as teardown; task is already a setup.") - self._is_teardown = value - @staticmethod def xcom_push( context: Any, @@ -1657,68 +961,10 @@ def xcom_pull( session=session, ) - @classmethod - def get_serialized_fields(cls): - """Stringified DAGs and operators contain exactly these fields.""" - if not cls.__serialized_fields: - from airflow.models.dag import DagContext - - # make sure the following dummy task is not added to current active - # dag in context, otherwise, it will result in - # `RuntimeError: dictionary changed size during iteration` - # Exception in SerializedDAG.serialize_dag() call. - DagContext.push_context_managed_dag(None) - cls.__serialized_fields = frozenset( - vars(BaseOperator(task_id="test")).keys() - - { - "upstream_task_ids", - "default_args", - "dag", - "_dag", - "label", - "_BaseOperator__instantiated", - "_BaseOperator__init_kwargs", - "_BaseOperator__from_mapped", - "_is_setup", - "_is_teardown", - "_on_failure_fail_dagrun", - } - | { # Class level defaults need to be added to this list - "start_date", - "end_date", - "_task_type", - "_operator_name", - "ui_color", - "ui_fgcolor", - "template_ext", - "template_fields", - "template_fields_renderers", - "params", - "is_setup", - "is_teardown", - "on_failure_fail_dagrun", - "map_index_template", - "start_trigger_args", - "_needs_expansion", - "start_from_trigger", - } - ) - DagContext.pop_context_managed_dag() - - return cls.__serialized_fields - def serialize_for_task_group(self) -> tuple[DagAttributeTypes, Any]: """Serialize; required by DAGNode.""" return DagAttributeTypes.OP, self.task_id - @property - def inherits_from_empty_operator(self): - """Used to determine if an Operator is inherited from EmptyOperator.""" - # This looks like `isinstance(self, EmptyOperator) would work, but this also - # needs to cope when `self` is a Serialized instance of a EmptyOperator or one - # of its subclasses (which don't inherit from anything but BaseOperator). 
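The static `xcom_push`/`xcom_pull` helpers shown above survive the refactor unchanged; day-to-day DAG code usually reaches them through the task instance injected into the execution context instead. A small usage sketch in TaskFlow style (Airflow 2.x import paths, task names illustrative)::

    import pendulum

    from airflow.decorators import dag, task


    @dag(schedule=None, start_date=pendulum.datetime(2024, 1, 1), catchup=False)
    def xcom_demo():
        @task
        def push(ti=None):
            # ti is the TaskInstance injected from the execution context.
            ti.xcom_push(key="greeting", value="hello")

        @task
        def pull(ti=None):
            print(ti.xcom_pull(task_ids="push", key="greeting"))

        push() >> pull()


    xcom_demo()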
- return getattr(self, "_is_empty", False) - def defer( self, *, @@ -2038,7 +1284,7 @@ def chain_linear(*elements: DependencyMixin | Sequence[DependencyMixin]): prev_elem = None deps_set = False for curr_elem in elements: - if isinstance(curr_elem, EdgeModifier): + if isinstance(curr_elem, (EdgeModifier, TaskSDKEdgeModifier)): raise ValueError("Labels are not supported by chain_linear") if prev_elem is not None: for task in prev_elem: diff --git a/airflow/models/dag.py b/airflow/models/dag.py index f0a7d7f56be2c..851d2a5129346 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -20,19 +20,15 @@ import asyncio import copy import functools -import itertools import logging -import os import pathlib import pickle import sys import time import traceback -import weakref -from collections import abc, defaultdict, deque +from collections import defaultdict from contextlib import ExitStack from datetime import datetime, timedelta -from inspect import signature from typing import ( TYPE_CHECKING, Any, @@ -40,17 +36,15 @@ Collection, Container, Iterable, - Iterator, - MutableSet, Pattern, Sequence, Union, cast, overload, ) -from urllib.parse import urlsplit -import jinja2 +import attrs +import methodtools import pendulum import re2 import sqlalchemy_jsonfield @@ -77,22 +71,16 @@ from sqlalchemy.orm import backref, relationship from sqlalchemy.sql import Select, expression -import airflow.templates from airflow import settings, utils from airflow.api_internal.internal_api_call import internal_api_call -from airflow.assets import Asset, AssetAlias, AssetAll, BaseAsset +from airflow.assets import Asset, AssetAlias, BaseAsset from airflow.configuration import conf as airflow_conf, secrets_backend_list from airflow.exceptions import ( AirflowException, - DuplicateTaskIdFound, - FailStopDagInvalidTriggerRule, - ParamValidationError, TaskDeferred, - TaskNotFound, UnknownExecutorException, ) from airflow.executors.executor_loader import ExecutorLoader -from airflow.models.abstractoperator import AbstractOperator, TaskStateChangeCallback from airflow.models.asset import ( AssetDagRunQueue, AssetModel, @@ -102,7 +90,6 @@ from airflow.models.dagcode import DagCode from airflow.models.dagpickle import DagPickle from airflow.models.dagrun import RUN_ID_REGEX, DagRun -from airflow.models.param import DagParam, ParamsDict from airflow.models.taskinstance import ( Context, TaskInstance, @@ -110,6 +97,7 @@ clear_task_instances, ) from airflow.models.tasklog import LogTemplate +from airflow.sdk import DAG as TaskSDKDag, dag as task_sdk_dag_decorator from airflow.secrets.local_filesystem import LocalFilesystemBackend from airflow.security import permissions from airflow.settings import json @@ -118,36 +106,28 @@ from airflow.timetables.interval import CronDataIntervalTimetable, DeltaDataIntervalTimetable from airflow.timetables.simple import ( AssetTriggeredTimetable, - ContinuousTimetable, NullTimetable, OnceTimetable, ) -from airflow.timetables.trigger import CronTriggerTimetable from airflow.utils import timezone from airflow.utils.dag_cycle_tester import check_cycle -from airflow.utils.decorators import fixup_decorator_warning_stack -from airflow.utils.helpers import exactly_one, validate_instance_args, validate_key +from airflow.utils.helpers import exactly_one from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.sqlalchemy import UtcDateTime, lock_rows, tuple_in_condition, with_row_locks from airflow.utils.state 
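`chain_linear` keeps rejecting edge labels; the only change above is that it now recognizes the Task SDK's `EdgeModifier` as well. For context, a typical use of the helper (Airflow 2.x import paths, which may differ in the 3.0 layout; task ids are illustrative)::

    import pendulum

    from airflow import DAG
    from airflow.models.baseoperator import chain_linear
    from airflow.operators.empty import EmptyOperator

    with DAG(dag_id="chain_linear_demo", schedule=None, start_date=pendulum.datetime(2024, 1, 1)):
        a, b, c, d = (EmptyOperator(task_id=t) for t in "abcd")
        # Wires a >> b, a >> c, b >> d, c >> d: every task in one element
        # becomes upstream of every task in the next element.
        chain_linear(a, [b, c], d)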
import DagRunState, State, TaskInstanceState -from airflow.utils.trigger_rule import TriggerRule -from airflow.utils.types import NOTSET, DagRunTriggeredByType, DagRunType, EdgeInfoType +from airflow.utils.types import DagRunTriggeredByType, DagRunType if TYPE_CHECKING: - from types import ModuleType - - from pendulum.tz.timezone import FixedTimezone, Timezone from sqlalchemy.orm.query import Query from sqlalchemy.orm.session import Session - from airflow.decorators import TaskDecoratorCollection + from airflow.models.abstractoperator import TaskStateChangeCallback from airflow.models.dagbag import DagBag from airflow.models.operator import Operator from airflow.serialization.pydantic.dag import DagModelPydantic from airflow.serialization.pydantic.dag_run import DagRunPydantic from airflow.typing_compat import Literal - from airflow.utils.task_group import TaskGroup log = logging.getLogger(__name__) @@ -206,24 +186,6 @@ def _get_model_data_interval( return DataInterval(start, end) -def create_timetable(interval: ScheduleInterval, timezone: Timezone | FixedTimezone) -> Timetable: - """Create a Timetable instance from a plain ``schedule`` value.""" - if interval is None: - return NullTimetable() - if interval == "@once": - return OnceTimetable() - if interval == "@continuous": - return ContinuousTimetable() - if isinstance(interval, (timedelta, relativedelta)): - return DeltaDataIntervalTimetable(interval) - if isinstance(interval, str): - if airflow_conf.getboolean("scheduler", "create_cron_data_intervals"): - return CronDataIntervalTimetable(interval, timezone) - else: - return CronTriggerTimetable(interval, timezone=timezone) - raise ValueError(f"{interval!r} is not a valid schedule.") - - def get_last_dagrun(dag_id, session, include_externally_triggered=False): """ Return the last dag run for a dag, None if there was none. @@ -268,12 +230,12 @@ def get_asset_triggered_next_run_info( .join( ADRQ, and_( - ADRQ.dataset_id == DagScheduleAssetReference.dataset_id, + ADRQ.asset_id == DagScheduleAssetReference.asset_id, ADRQ.target_dag_id == DagScheduleAssetReference.dag_id, ), isouter=True, ) - .join(AssetModel, AssetModel.id == DagScheduleAssetReference.dataset_id) + .join(AssetModel, AssetModel.id == DagScheduleAssetReference.asset_id) .group_by(DagScheduleAssetReference.dag_id) .where(DagScheduleAssetReference.dag_id.in_(dag_ids)) ).all() @@ -322,7 +284,7 @@ def _create_orm_dagrun( ) # Load defaults into the following two fields to ensure result can be serialized detached run.log_template_id = int(session.scalar(select(func.max(LogTemplate.__table__.c.id)))) - run.consumed_dataset_events = [] + run.consumed_asset_events = [] session.add(run) session.flush() run.dag = dag @@ -332,34 +294,28 @@ def _create_orm_dagrun( return run -# TODO: The following mapping is used to validate that the arguments passed to the DAG are of the correct -# type. This is a temporary solution until we find a more sophisticated method for argument validation. -# One potential method is to use `get_type_hints` from the typing module. However, this is not fully -# compatible with future annotations for Python versions below 3.10. Once we require a minimum Python -# version that supports `get_type_hints` effectively or find a better approach, we can replace this -# manual type-checking method. 
-DAG_ARGS_EXPECTED_TYPES = { - "dag_id": str, - "description": str, - "max_active_tasks": int, - "max_active_runs": int, - "max_consecutive_failed_dag_runs": int, - "dagrun_timeout": timedelta, - "default_view": str, - "orientation": str, - "catchup": bool, - "doc_md": str, - "is_paused_upon_creation": bool, - "render_template_as_native_obj": bool, - "tags": Collection, - "auto_register": bool, - "fail_stop": bool, - "dag_display_name": str, -} +if TYPE_CHECKING: + dag = task_sdk_dag_decorator +else: + + def dag(dag_id: str = "", **kwargs): + return task_sdk_dag_decorator(dag_id, __DAG_class=DAG, __warnings_stacklevel_delta=3, **kwargs) + + +def _convert_max_consecutive_failed_dag_runs(val: int) -> int: + if val == 0: + val = airflow_conf.getint("core", "max_consecutive_failed_dag_runs_per_dag") + if val < 0: + raise ValueError( + f"Invalid max_consecutive_failed_dag_runs: {val}." + f"Requires max_consecutive_failed_dag_runs >= 0" + ) + return val @functools.total_ordering -class DAG(LoggingMixin): +@attrs.define(hash=False, repr=False, eq=False, slots=False) +class DAG(TaskSDKDag, LoggingMixin): """ A dag (directed acyclic graph) is a collection of tasks with directional dependencies. @@ -376,7 +332,7 @@ class DAG(LoggingMixin): .. versionadded:: 2.4 The *schedule* argument to specify either time-based scheduling logic - (timetable), or dataset-driven triggers. + (timetable), or asset-driven triggers. .. versionchanged:: 3.0 The default value of *schedule* has been changed to *None* (no schedule). @@ -473,250 +429,30 @@ class DAG(LoggingMixin): :param dag_display_name: The display name of the DAG which appears on the UI. """ - _comps = { - "dag_id", - "task_ids", - "start_date", - "end_date", - "fileloc", - "template_searchpath", - "last_loaded", - } - - __serialized_fields: frozenset[str] | None = None - - fileloc: str - """ - File path that needs to be imported to load this DAG. - - This may not be an actual file on disk in the case when this DAG is loaded - from a ZIP file or other DAG distribution format. - """ - - # NOTE: When updating arguments here, please also keep arguments in @dag() - # below in sync. (Search for 'def dag(' in this file.) 
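The `dag` decorator above is now just a shim: under `TYPE_CHECKING` it is aliased to the Task SDK decorator so type checkers see the SDK signature, while at runtime a thin wrapper forwards to it and pins the scheduler's `DAG` class. A generic sketch of that shape (the helper names and keyword below are made up, not the real SDK API)::

    from typing import TYPE_CHECKING, Callable


    def sdk_dag_decorator(dag_id: str = "", *, dag_class: type | None = None, **kwargs) -> Callable:
        # Stand-in for the SDK decorator: returns a decorator that builds DAG objects.
        def decorator(func: Callable) -> Callable:
            return func

        return decorator


    class SchedulerDAG: ...


    if TYPE_CHECKING:
        dag = sdk_dag_decorator  # static checkers see the SDK signature directly
    else:

        def dag(dag_id: str = "", **kwargs):
            # Runtime wrapper: forward to the SDK decorator with the scheduler DAG class pinned.
            return sdk_dag_decorator(dag_id, dag_class=SchedulerDAG, **kwargs)


    @dag("example")
    def my_pipeline(): ...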
- def __init__( - self, - dag_id: str, - description: str | None = None, - schedule: ScheduleArg = None, - start_date: datetime | None = None, - end_date: datetime | None = None, - template_searchpath: str | Iterable[str] | None = None, - template_undefined: type[jinja2.StrictUndefined] = jinja2.StrictUndefined, - user_defined_macros: dict | None = None, - user_defined_filters: dict | None = None, - default_args: dict | None = None, - max_active_tasks: int = airflow_conf.getint("core", "max_active_tasks_per_dag"), - max_active_runs: int = airflow_conf.getint("core", "max_active_runs_per_dag"), - max_consecutive_failed_dag_runs: int = airflow_conf.getint( - "core", "max_consecutive_failed_dag_runs_per_dag" - ), - dagrun_timeout: timedelta | None = None, - sla_miss_callback: Any = None, - default_view: str = airflow_conf.get_mandatory_value("webserver", "dag_default_view").lower(), - orientation: str = airflow_conf.get_mandatory_value("webserver", "dag_orientation"), - catchup: bool = airflow_conf.getboolean("scheduler", "catchup_by_default"), - on_success_callback: None | DagStateChangeCallback | list[DagStateChangeCallback] = None, - on_failure_callback: None | DagStateChangeCallback | list[DagStateChangeCallback] = None, - doc_md: str | None = None, - params: abc.MutableMapping | None = None, - access_control: dict[str, dict[str, Collection[str]]] | dict[str, Collection[str]] | None = None, - is_paused_upon_creation: bool | None = None, - jinja_environment_kwargs: dict | None = None, - render_template_as_native_obj: bool = False, - tags: Collection[str] | None = None, - owner_links: dict[str, str] | None = None, - auto_register: bool = True, - fail_stop: bool = False, - dag_display_name: str | None = None, - ): - from airflow.utils.task_group import TaskGroup - - if tags and any(len(tag) > TAG_MAX_LEN for tag in tags): - raise AirflowException(f"tag cannot be longer than {TAG_MAX_LEN} characters") - - self.owner_links = owner_links or {} - self.user_defined_macros = user_defined_macros - self.user_defined_filters = user_defined_filters - if default_args and not isinstance(default_args, dict): - raise TypeError("default_args must be a dict") - self.default_args = copy.deepcopy(default_args or {}) - params = params or {} - - # merging potentially conflicting default_args['params'] into params - if "params" in self.default_args: - params.update(self.default_args["params"]) - del self.default_args["params"] - - # check self.params and convert them into ParamsDict - self.params = ParamsDict(params) - - validate_key(dag_id) - - self._dag_id = dag_id - self._dag_display_property_value = dag_display_name - - self._max_active_tasks = max_active_tasks - self._pickle_id: int | None = None - - self._description = description - # set file location to caller source path - back = sys._getframe().f_back - self.fileloc = back.f_code.co_filename if back else "" - self.task_dict: dict[str, Operator] = {} - - # set timezone from start_date - tz = None - if start_date and start_date.tzinfo: - tzinfo = None if start_date.tzinfo else settings.TIMEZONE - tz = pendulum.instance(start_date, tz=tzinfo).timezone - elif date := self.default_args.get("start_date"): - if not isinstance(date, datetime): - date = timezone.parse(date) - self.default_args["start_date"] = date - start_date = date - - tzinfo = None if date.tzinfo else settings.TIMEZONE - tz = pendulum.instance(date, tz=tzinfo).timezone - self.timezone: Timezone | FixedTimezone = tz or settings.TIMEZONE - - # Apply the timezone we settled on to end_date if it 
wasn't supplied - if isinstance(_end_date := self.default_args.get("end_date"), str): - self.default_args["end_date"] = timezone.parse(_end_date, timezone=self.timezone) - - self.start_date = timezone.convert_to_utc(start_date) - self.end_date = timezone.convert_to_utc(end_date) - - # also convert tasks - if "start_date" in self.default_args: - self.default_args["start_date"] = timezone.convert_to_utc(self.default_args["start_date"]) - if "end_date" in self.default_args: - self.default_args["end_date"] = timezone.convert_to_utc(self.default_args["end_date"]) - - if isinstance(schedule, Timetable): - self.timetable = schedule - elif isinstance(schedule, BaseAsset): - self.timetable = AssetTriggeredTimetable(schedule) - elif isinstance(schedule, Collection) and not isinstance(schedule, str): - if not all(isinstance(x, (Asset, AssetAlias)) for x in schedule): - raise ValueError("All elements in 'schedule' should be assets or asset aliases") - self.timetable = AssetTriggeredTimetable(AssetAll(*schedule)) - else: - self.timetable = create_timetable(schedule, self.timezone) - - requires_automatic_backfilling = self.timetable.can_be_scheduled and catchup - if requires_automatic_backfilling and not ("start_date" in self.default_args or self.start_date): - raise ValueError("start_date is required when catchup=True") - - if isinstance(template_searchpath, str): - template_searchpath = [template_searchpath] - self.template_searchpath = template_searchpath - self.template_undefined = template_undefined - self.last_loaded: datetime = timezone.utcnow() - self.safe_dag_id = dag_id.replace(".", "__dot__") - self.max_active_runs = max_active_runs - self.max_consecutive_failed_dag_runs = max_consecutive_failed_dag_runs - if self.max_consecutive_failed_dag_runs == 0: - self.max_consecutive_failed_dag_runs = airflow_conf.getint( - "core", "max_consecutive_failed_dag_runs_per_dag" - ) - if self.max_consecutive_failed_dag_runs < 0: - raise AirflowException( - f"Invalid max_consecutive_failed_dag_runs: {self.max_consecutive_failed_dag_runs}." - f"Requires max_consecutive_failed_dag_runs >= 0" - ) - if self.timetable.active_runs_limit is not None: - if self.timetable.active_runs_limit < self.max_active_runs: - raise AirflowException( - f"Invalid max_active_runs: {type(self.timetable)} " - f"requires max_active_runs <= {self.timetable.active_runs_limit}" - ) - self.dagrun_timeout = dagrun_timeout - if sla_miss_callback: - log.warning( - "The SLA feature is removed in Airflow 3.0, to be replaced with a new implementation in 3.1" - ) - if default_view in DEFAULT_VIEW_PRESETS: - self._default_view: str = default_view - else: - raise AirflowException( - f"Invalid values of dag.default_view: only support " - f"{DEFAULT_VIEW_PRESETS}, but get {default_view}" - ) - if orientation in ORIENTATION_PRESETS: - self.orientation = orientation - else: - raise AirflowException( - f"Invalid values of dag.orientation: only support " - f"{ORIENTATION_PRESETS}, but get {orientation}" - ) - self.catchup: bool = catchup - - self.partial: bool = False - self.on_success_callback = on_success_callback - self.on_failure_callback = on_failure_callback - - # Keeps track of any extra edge metadata (sparse; will not contain all - # edges, so do not iterate over it for that). Outer key is upstream - # task ID, inner key is downstream task ID. 
- self.edge_info: dict[str, dict[str, EdgeInfoType]] = {} - - # To keep it in parity with Serialized DAGs - # and identify if DAG has on_*_callback without actually storing them in Serialized JSON - self.has_on_success_callback: bool = self.on_success_callback is not None - self.has_on_failure_callback: bool = self.on_failure_callback is not None - - self._access_control = DAG._upgrade_outdated_dag_access_control(access_control) - self.is_paused_upon_creation = is_paused_upon_creation - self.auto_register = auto_register - - self.fail_stop: bool = fail_stop - - self.jinja_environment_kwargs = jinja_environment_kwargs - self.render_template_as_native_obj = render_template_as_native_obj + partial: bool = False + last_loaded: datetime | None = attrs.field(factory=timezone.utcnow) - self.doc_md = self.get_doc_md(doc_md) + default_view: str = airflow_conf.get_mandatory_value("webserver", "dag_default_view").lower() + orientation: str = airflow_conf.get_mandatory_value("webserver", "dag_orientation") - self.tags: MutableSet[str] = set(tags or []) - self._task_group = TaskGroup.create_root(self) - self.validate_schedule_and_params() - wrong_links = dict(self.iter_invalid_owner_links()) - if wrong_links: - raise AirflowException( - "Wrong link format was used for the owner. Use a valid link \n" - f"Bad formatted links are: {wrong_links}" - ) - - # this will only be set at serialization time - # it's only use is for determining the relative - # fileloc based only on the serialize dag - self._processor_dags_folder = None - - validate_instance_args(self, DAG_ARGS_EXPECTED_TYPES) + # this will only be set at serialization time + # it's only use is for determining the relative fileloc based only on the serialize dag + _processor_dags_folder: str | None = attrs.field(init=False, default=None) - def get_doc_md(self, doc_md: str | None) -> str | None: - if doc_md is None: - return doc_md - - if doc_md.endswith(".md"): - try: - return open(doc_md).read() - except FileNotFoundError: - return doc_md + # Override the default from parent class to use config + max_consecutive_failed_dag_runs: int = attrs.field( + default=0, + converter=_convert_max_consecutive_failed_dag_runs, + validator=attrs.validators.instance_of(int), + ) - return doc_md + @property + def safe_dag_id(self): + return self.dag_id.replace(".", "__dot__") def validate(self): - """ - Validate the DAG has a coherent setup. - - This is called by the DAG bag before bagging the DAG. - """ + super().validate() self.validate_executor_field() - self.validate_schedule_and_params() - self.timetable.validate() - self.validate_setup_teardown() def validate_executor_field(self): for task in self.tasks: @@ -730,63 +466,6 @@ def validate_executor_field(self): "update the executor configuration for this task." ) - def validate_setup_teardown(self): - """ - Validate that setup and teardown tasks are configured properly. 
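With `DAG` now an `attrs` class, per-field normalization that used to live in `__init__` moves into converters such as `_convert_max_consecutive_failed_dag_runs` defined earlier in this file. A minimal standalone version of the same converter-plus-validator pattern (the class name and fallback value here are illustrative, not the real config lookup)::

    import attrs


    def _nonzero_or_default(val: int) -> int:
        # Converters run before validators: map 0 to a fallback and reject negatives,
        # as the real converter does with the configured default.
        if val == 0:
            return 5  # stand-in for the config lookup
        if val < 0:
            raise ValueError(f"Invalid value: {val}. Requires value >= 0")
        return val


    @attrs.define
    class RunLimits:
        max_consecutive_failed_dag_runs: int = attrs.field(
            default=0,
            converter=_nonzero_or_default,
            validator=attrs.validators.instance_of(int),
        )


    print(RunLimits())                                   # max_consecutive_failed_dag_runs=5
    print(RunLimits(max_consecutive_failed_dag_runs=3))  # max_consecutive_failed_dag_runs=3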
- - :meta private: - """ - for task in self.tasks: - if task.is_setup: - for down_task in task.downstream_list: - if not down_task.is_teardown and down_task.trigger_rule != TriggerRule.ALL_SUCCESS: - # todo: we can relax this to allow out-of-scope tasks to have other trigger rules - # this is required to ensure consistent behavior of dag - # when clearing an indirect setup - raise ValueError("Setup tasks must be followed with trigger rule ALL_SUCCESS.") - FailStopDagInvalidTriggerRule.check(dag=self, trigger_rule=task.trigger_rule) - - def __repr__(self): - return f"" - - def __eq__(self, other): - if type(self) is type(other): - # Use getattr() instead of __dict__ as __dict__ doesn't return - # correct values for properties. - return all(getattr(self, c, None) == getattr(other, c, None) for c in self._comps) - return False - - def __ne__(self, other): - return not self == other - - def __lt__(self, other): - return self.dag_id < other.dag_id - - def __hash__(self): - hash_components = [type(self)] - for c in self._comps: - # task_ids returns a list and lists can't be hashed - if c == "task_ids": - val = tuple(self.task_dict) - else: - val = getattr(self, c, None) - try: - hash(val) - hash_components.append(val) - except TypeError: - hash_components.append(repr(val)) - return hash(tuple(hash_components)) - - # Context Manager ----------------------------------------------- - def __enter__(self): - DagContext.push_context_managed_dag(self) - return self - - def __exit__(self, _type, _value, _tb): - DagContext.pop_context_managed_dag() - - # /Context Manager ---------------------------------------------- - @staticmethod def _upgrade_outdated_dag_access_control(access_control=None): """Look for outdated dag level actions in DAG access_controls and replace them with updated actions.""" @@ -951,7 +630,7 @@ def _time_restriction(self) -> TimeRestriction: earliest = None if start_dates: earliest = timezone.coerce_datetime(min(start_dates)) - latest = self.end_date + latest = timezone.coerce_datetime(self.end_date) end_dates = [t.end_date for t in self.tasks if t.end_date] if len(end_dates) == len(self.tasks): # not exists null end_date if self.end_date is not None: @@ -962,8 +641,8 @@ def _time_restriction(self) -> TimeRestriction: def iter_dagrun_infos_between( self, - earliest: pendulum.DateTime | None, - latest: pendulum.DateTime, + earliest: pendulum.DateTime | datetime | None, + latest: pendulum.DateTime | datetime, *, align: bool = True, ) -> Iterable[DagRunInfo]: @@ -1060,34 +739,6 @@ def dag_id(self, value: str) -> None: def timetable_summary(self) -> str: return self.timetable.summary - @property - def max_active_tasks(self) -> int: - return self._max_active_tasks - - @max_active_tasks.setter - def max_active_tasks(self, value: int): - self._max_active_tasks = value - - @property - def access_control(self): - return self._access_control - - @access_control.setter - def access_control(self, value): - self._access_control = DAG._upgrade_outdated_dag_access_control(value) - - @property - def dag_display_name(self) -> str: - return self._dag_display_property_value or self._dag_id - - @property - def description(self) -> str | None: - return self._description - - @property - def default_view(self) -> str: - return self._default_view - @property def pickle_id(self) -> int | None: return self._pickle_id @@ -1096,41 +747,6 @@ def pickle_id(self) -> int | None: def pickle_id(self, value: int) -> None: self._pickle_id = value - def param(self, name: str, default: Any = NOTSET) -> DagParam: - """ - 
Return a DagParam object for current dag. - - :param name: dag parameter name. - :param default: fallback value for dag parameter. - :return: DagParam instance for specified name and current dag. - """ - return DagParam(current_dag=self, name=name, default=default) - - @property - def tasks(self) -> list[Operator]: - return list(self.task_dict.values()) - - @tasks.setter - def tasks(self, val): - raise AttributeError("DAG.tasks can not be modified. Use dag.add_task() instead.") - - @property - def task_ids(self) -> list[str]: - return list(self.task_dict) - - @property - def teardowns(self) -> list[Operator]: - return [task for task in self.tasks if getattr(task, "is_teardown", None)] - - @property - def tasks_upstream_of_teardowns(self) -> list[Operator]: - upstream_tasks = [t.upstream_list for t in self.teardowns] - return [val for sublist in upstream_tasks for val in sublist if not getattr(val, "is_teardown", None)] - - @property - def task_group(self) -> TaskGroup: - return self._task_group - @property def relative_fileloc(self) -> pathlib.Path: """File location of the importable dag 'file' relative to the configured DAGs folder.""" @@ -1145,24 +761,6 @@ def relative_fileloc(self) -> pathlib.Path: # Not relative to DAGS_FOLDER. return path - @property - def folder(self) -> str: - """Folder location of where the DAG object is instantiated.""" - return os.path.dirname(self.fileloc) - - @property - def owner(self) -> str: - """ - Return list of all owners found in DAG tasks. - - :return: Comma separated list of owners in DAG tasks - """ - return ", ".join({t.owner for t in self.tasks}) - - @property - def allow_future_exec_dates(self) -> bool: - return settings.ALLOW_FUTURE_EXEC_DATES and not self.timetable.can_be_scheduled - @provide_session def get_concurrency_reached(self, session=NEW_SESSION) -> bool: """Return a boolean indicating whether the max_active_tasks limit for this DAG has been reached.""" @@ -1185,6 +783,14 @@ def get_is_paused(self, session=NEW_SESSION) -> None: """Return a boolean indicating whether this DAG is paused.""" return session.scalar(select(DagModel.is_paused).where(DagModel.dag_id == self.dag_id)) + @methodtools.lru_cache(maxsize=None) + @classmethod + def get_serialized_fields(cls): + """Stringified DAGs and operators contain exactly these fields.""" + return TaskSDKDag.get_serialized_fields() | { + "_processor_dags_folder", + } + @staticmethod @internal_api_call @provide_session @@ -1346,45 +952,6 @@ def get_latest_execution_date(self, session: Session = NEW_SESSION) -> pendulum. 
"""Return the latest date for which at least one dag run exists.""" return session.scalar(select(func.max(DagRun.execution_date)).where(DagRun.dag_id == self.dag_id)) - def resolve_template_files(self): - for t in self.tasks: - t.resolve_template_files() - - def get_template_env(self, *, force_sandboxed: bool = False) -> jinja2.Environment: - """Build a Jinja2 environment.""" - # Collect directories to search for template files - searchpath = [self.folder] - if self.template_searchpath: - searchpath += self.template_searchpath - - # Default values (for backward compatibility) - jinja_env_options = { - "loader": jinja2.FileSystemLoader(searchpath), - "undefined": self.template_undefined, - "extensions": ["jinja2.ext.do"], - "cache_size": 0, - } - if self.jinja_environment_kwargs: - jinja_env_options.update(self.jinja_environment_kwargs) - env: jinja2.Environment - if self.render_template_as_native_obj and not force_sandboxed: - env = airflow.templates.NativeEnvironment(**jinja_env_options) - else: - env = airflow.templates.SandboxedEnvironment(**jinja_env_options) - - # Add any user defined items. Safe to edit globals as long as no templates are rendered yet. - # http://jinja.pocoo.org/docs/2.10/api/#jinja2.Environment.globals - if self.user_defined_macros: - env.globals.update(self.user_defined_macros) - if self.user_defined_filters: - env.filters.update(self.user_defined_filters) - - return env - - def set_dependency(self, upstream_task_id, downstream_task_id): - """Set dependency between two tasks that already have been added to the DAG using add_task().""" - self.get_task(upstream_task_id).set_downstream(self.get_task(downstream_task_id)) - @provide_session def get_task_instances_before( self, @@ -1849,33 +1416,6 @@ def set_task_group_state( return altered - @property - def roots(self) -> list[Operator]: - """Return nodes with no parents. These are first to execute and are called roots or root nodes.""" - return [task for task in self.tasks if not task.upstream_list] - - @property - def leaves(self) -> list[Operator]: - """Return nodes with no children. These are last to execute and are called leaves or leaf nodes.""" - return [task for task in self.tasks if not task.downstream_list] - - def topological_sort(self): - """ - Sorts tasks in topographical order, such that a task comes after any of its upstream dependencies. 
- - Deprecated in place of ``task_group.topological_sort`` - """ - from airflow.utils.task_group import TaskGroup - - def nested_topo(group): - for node in group.topological_sort(): - if isinstance(node, TaskGroup): - yield from nested_topo(node) - else: - yield node - - return tuple(nested_topo(self.task_group)) - @provide_session def clear( self, @@ -2009,169 +1549,6 @@ def clear_dags( print("Cancelled, nothing was cleared.") return count - def __deepcopy__(self, memo): - # Switcharoo to go around deepcopying objects coming through the - # backdoor - cls = self.__class__ - result = cls.__new__(cls) - memo[id(self)] = result - for k, v in self.__dict__.items(): - if k not in ("user_defined_macros", "user_defined_filters", "_log"): - setattr(result, k, copy.deepcopy(v, memo)) - - result.user_defined_macros = self.user_defined_macros - result.user_defined_filters = self.user_defined_filters - if hasattr(self, "_log"): - result._log = self._log - return result - - def partial_subset( - self, - task_ids_or_regex: str | Pattern | Iterable[str], - include_downstream=False, - include_upstream=True, - include_direct_upstream=False, - ): - """ - Return a subset of the current dag based on regex matching one or more tasks. - - Returns a subset of the current dag as a deep copy of the current dag - based on a regex that should match one or many tasks, and includes - upstream and downstream neighbours based on the flag passed. - - :param task_ids_or_regex: Either a list of task_ids, or a regex to - match against task ids (as a string, or compiled regex pattern). - :param include_downstream: Include all downstream tasks of matched - tasks, in addition to matched tasks. - :param include_upstream: Include all upstream tasks of matched tasks, - in addition to matched tasks. 
- :param include_direct_upstream: Include all tasks directly upstream of matched - and downstream (if include_downstream = True) tasks - """ - from airflow.models.baseoperator import BaseOperator - from airflow.models.mappedoperator import MappedOperator - - # deep-copying self.task_dict and self._task_group takes a long time, and we don't want all - # the tasks anyway, so we copy the tasks manually later - memo = {id(self.task_dict): None, id(self._task_group): None} - dag = copy.deepcopy(self, memo) # type: ignore - - if isinstance(task_ids_or_regex, (str, Pattern)): - matched_tasks = [t for t in self.tasks if re2.findall(task_ids_or_regex, t.task_id)] - else: - matched_tasks = [t for t in self.tasks if t.task_id in task_ids_or_regex] - - also_include_ids: set[str] = set() - for t in matched_tasks: - if include_downstream: - for rel in t.get_flat_relatives(upstream=False): - also_include_ids.add(rel.task_id) - if rel not in matched_tasks: # if it's in there, we're already processing it - # need to include setups and teardowns for tasks that are in multiple - # non-collinear setup/teardown paths - if not rel.is_setup and not rel.is_teardown: - also_include_ids.update( - x.task_id for x in rel.get_upstreams_only_setups_and_teardowns() - ) - if include_upstream: - also_include_ids.update(x.task_id for x in t.get_upstreams_follow_setups()) - else: - if not t.is_setup and not t.is_teardown: - also_include_ids.update(x.task_id for x in t.get_upstreams_only_setups_and_teardowns()) - if t.is_setup and not include_downstream: - also_include_ids.update(x.task_id for x in t.downstream_list if x.is_teardown) - - also_include: list[Operator] = [self.task_dict[x] for x in also_include_ids] - direct_upstreams: list[Operator] = [] - if include_direct_upstream: - for t in itertools.chain(matched_tasks, also_include): - upstream = (u for u in t.upstream_list if isinstance(u, (BaseOperator, MappedOperator))) - direct_upstreams.extend(upstream) - - # Compiling the unique list of tasks that made the cut - # Make sure to not recursively deepcopy the dag or task_group while copying the task. - # task_group is reset later - def _deepcopy_task(t) -> Operator: - memo.setdefault(id(t.task_group), None) - return copy.deepcopy(t, memo) - - dag.task_dict = { - t.task_id: _deepcopy_task(t) - for t in itertools.chain(matched_tasks, also_include, direct_upstreams) - } - - def filter_task_group(group, parent_group): - """Exclude tasks not included in the subdag from the given TaskGroup.""" - # We want to deepcopy _most but not all_ attributes of the task group, so we create a shallow copy - # and then manually deep copy the instances. (memo argument to deepcopy only works for instances - # of classes, not "native" properties of an instance) - copied = copy.copy(group) - - memo[id(group.children)] = {} - if parent_group: - memo[id(group.parent_group)] = parent_group - for attr, value in copied.__dict__.items(): - if id(value) in memo: - value = memo[id(value)] - else: - value = copy.deepcopy(value, memo) - copied.__dict__[attr] = value - - proxy = weakref.proxy(copied) - - for child in group.children.values(): - if isinstance(child, AbstractOperator): - if child.task_id in dag.task_dict: - task = copied.children[child.task_id] = dag.task_dict[child.task_id] - task.task_group = proxy - else: - copied.used_group_ids.discard(child.task_id) - else: - filtered_child = filter_task_group(child, proxy) - - # Only include this child TaskGroup if it is non-empty. 
- if filtered_child.children: - copied.children[child.group_id] = filtered_child - - return copied - - dag._task_group = filter_task_group(self.task_group, None) - - # Removing upstream/downstream references to tasks and TaskGroups that did not make - # the cut. - subdag_task_groups = dag.task_group.get_task_group_dict() - for group in subdag_task_groups.values(): - group.upstream_group_ids.intersection_update(subdag_task_groups) - group.downstream_group_ids.intersection_update(subdag_task_groups) - group.upstream_task_ids.intersection_update(dag.task_dict) - group.downstream_task_ids.intersection_update(dag.task_dict) - - for t in dag.tasks: - # Removing upstream/downstream references to tasks that did not - # make the cut - t.upstream_task_ids.intersection_update(dag.task_dict) - t.downstream_task_ids.intersection_update(dag.task_dict) - - if len(dag.tasks) < len(self.tasks): - dag.partial = True - - return dag - - def has_task(self, task_id: str): - return task_id in self.task_dict - - def has_task_group(self, task_group_id: str) -> bool: - return task_group_id in self.task_group_dict - - @functools.cached_property - def task_group_dict(self): - return {k: v for k, v in self._task_group.get_task_group_dict().items() if k is not None} - - def get_task(self, task_id: str) -> Operator: - if task_id in self.task_dict: - return self.task_dict[task_id] - raise TaskNotFound(f"Task {task_id} not found") - def pickle_info(self): d = {} d["is_picklable"] = True @@ -2201,76 +1578,6 @@ def pickle(self, session=NEW_SESSION) -> DagPickle: return dp - @property - def task(self) -> TaskDecoratorCollection: - from airflow.decorators import task - - return cast("TaskDecoratorCollection", functools.partial(task, dag=self)) - - def add_task(self, task: Operator) -> None: - """ - Add a task to the DAG. - - :param task: the task you want to add - """ - FailStopDagInvalidTriggerRule.check(dag=self, trigger_rule=task.trigger_rule) - - from airflow.utils.task_group import TaskGroupContext - - # if the task has no start date, assign it the same as the DAG - if not task.start_date: - task.start_date = self.start_date - # otherwise, the task will start on the later of its own start date and - # the DAG's start date - elif self.start_date: - task.start_date = max(task.start_date, self.start_date) - - # if the task has no end date, assign it the same as the dag - if not task.end_date: - task.end_date = self.end_date - # otherwise, the task will end on the earlier of its own end date and - # the DAG's end date - elif task.end_date and self.end_date: - task.end_date = min(task.end_date, self.end_date) - - task_id = task.task_id - if not task.task_group: - task_group = TaskGroupContext.get_current_task_group(self) - if task_group: - task_id = task_group.child_id(task_id) - task_group.add(task) - - if ( - task_id in self.task_dict and self.task_dict[task_id] is not task - ) or task_id in self._task_group.used_group_ids: - raise DuplicateTaskIdFound(f"Task id '{task_id}' has already been added to the DAG") - else: - self.task_dict[task_id] = task - task.dag = self - # Add task_id to used_group_ids to prevent group_id and task_id collisions. - self._task_group.used_group_ids.add(task_id) - - self.task_count = len(self.task_dict) - - def add_tasks(self, tasks: Iterable[Operator]) -> None: - """ - Add a list of tasks to the DAG. 
- - :param tasks: a lit of tasks you want to add - """ - for task in tasks: - self.add_task(task) - - def _remove_task(self, task_id: str) -> None: - # This is "private" as removing could leave a hole in dependencies if done incorrectly, and this - # doesn't guard against that - task = self.task_dict.pop(task_id) - tg = getattr(task, "task_group", None) - if tg: - tg._remove(task) - - self.task_count = len(self.task_dict) - def cli(self): """Exposes a CLI specific to this DAG.""" check_cycle(self) @@ -2515,6 +1822,9 @@ def create_dagrun( # todo: AIP-78 add verification that if run type is backfill then we have a backfill id + if TYPE_CHECKING: + # TODO: Task-SDK: remove this assert + assert self.params # create a copy of params before validating copied_params = copy.deepcopy(self.params) copied_params.update(conf or {}) @@ -2571,7 +1881,6 @@ def bulk_write_to_db( orm_asset_aliases = asset_op.add_asset_aliases(session=session) session.flush() # This populates id so we can create fks in later calls. - asset_op.add_asset_active_references(orm_assets.values(), session=session) asset_op.add_dag_asset_references(orm_dags, orm_assets, session=session) asset_op.add_dag_asset_alias_references(orm_dags, orm_asset_aliases, session=session) asset_op.add_task_asset_references(orm_dags, orm_assets, session=session) @@ -2670,88 +1979,6 @@ def get_num_task_instances(dag_id, run_id=None, task_ids=None, states=None, sess qry = qry.where(TaskInstance.state.in_(states)) return session.scalar(qry) - @classmethod - def get_serialized_fields(cls): - """Stringified DAGs and operators contain exactly these fields.""" - if not cls.__serialized_fields: - exclusion_list = { - "schedule_dataset_references", - "schedule_dataset_alias_references", - "task_outlet_dataset_references", - "_old_context_manager_dags", - "safe_dag_id", - "last_loaded", - "user_defined_filters", - "user_defined_macros", - "partial", - "params", - "_pickle_id", - "_log", - "task_dict", - "template_searchpath", - "sla_miss_callback", - "on_success_callback", - "on_failure_callback", - "template_undefined", - "jinja_environment_kwargs", - # has_on_*_callback are only stored if the value is True, as the default is False - "has_on_success_callback", - "has_on_failure_callback", - "auto_register", - "fail_stop", - } - cls.__serialized_fields = frozenset(vars(DAG(dag_id="test", schedule=None))) - exclusion_list - return cls.__serialized_fields - - def get_edge_info(self, upstream_task_id: str, downstream_task_id: str) -> EdgeInfoType: - """Return edge information for the given pair of tasks or an empty edge if there is no information.""" - # Note - older serialized DAGs may not have edge_info being a dict at all - empty = cast(EdgeInfoType, {}) - if self.edge_info: - return self.edge_info.get(upstream_task_id, {}).get(downstream_task_id, empty) - else: - return empty - - def set_edge_info(self, upstream_task_id: str, downstream_task_id: str, info: EdgeInfoType): - """ - Set the given edge information on the DAG. - - Note that this will overwrite, rather than merge with, existing info. - """ - self.edge_info.setdefault(upstream_task_id, {})[downstream_task_id] = info - - def validate_schedule_and_params(self): - """ - Validate Param values when the DAG has schedule defined. - - Raise exception if there are any Params which can not be resolved by their schema definition. 
- """ - if not self.timetable.can_be_scheduled: - return - - try: - self.params.validate() - except ParamValidationError as pverr: - raise AirflowException( - "DAG is not allowed to define a Schedule, " - "if there are any required params without default values or default values are not valid." - ) from pverr - - def iter_invalid_owner_links(self) -> Iterator[tuple[str, str]]: - """ - Parse a given link, and verifies if it's a valid URL, or a 'mailto' link. - - Returns an iterator of invalid (owner, link) pairs. - """ - for owner, link in self.owner_links.items(): - result = urlsplit(link) - if result.scheme == "mailto": - # netloc is not existing for 'mailto' link, so we are checking that the path is parsed - if not result.path: - yield result.path, link - elif not result.scheme or not result.netloc: - yield owner, link - class DagTag(Base): """A tag name per dag, to allow quick filtering in the DAG view.""" @@ -2819,8 +2046,6 @@ class DagModel(Base): # Time when the DAG last received a refresh signal # (e.g. the DAG's "refresh" button was clicked in the web UI) last_expired = Column(UtcDateTime) - # Whether (one of) the scheduler is scheduling this DAG at the moment - scheduler_lock = Column(Boolean) # Foreign key to the latest pickle_id pickle_id = Column(Integer) # The location of the file containing the DAG object @@ -2842,8 +2067,8 @@ class DagModel(Base): timetable_summary = Column(Text, nullable=True) # Timetable description timetable_description = Column(String(1000), nullable=True) - # Dataset expression based on dataset triggers - dataset_expression = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) + # Asset expression based on asset triggers + asset_expression = Column(sqlalchemy_jsonfield.JSONField(json=json), nullable=True) # Tags for view filter tags = relationship("DagTag", cascade="all, delete, delete-orphan", backref=backref("dag")) # Dag owner links for DAGs view @@ -2870,18 +2095,18 @@ class DagModel(Base): __table_args__ = (Index("idx_next_dagrun_create_after", next_dagrun_create_after, unique=False),) - schedule_dataset_references = relationship( + schedule_asset_references = relationship( "DagScheduleAssetReference", back_populates="dag", cascade="all, delete, delete-orphan", ) - schedule_dataset_alias_references = relationship( + schedule_asset_alias_references = relationship( "DagScheduleAssetAliasReference", back_populates="dag", cascade="all, delete, delete-orphan", ) - schedule_datasets = association_proxy("schedule_dataset_references", "dataset") - task_outlet_dataset_references = relationship( + schedule_assets = association_proxy("schedule_asset_references", "asset") + task_outlet_asset_references = relationship( "TaskOutletAssetReference", cascade="all, delete, delete-orphan", ) @@ -3093,7 +2318,7 @@ def dag_ready(dag_id: str, cond: BaseAsset, statuses: dict) -> bool | None: del all_records dag_statuses = {} for dag_id, records in by_dag.items(): - dag_statuses[dag_id] = {x.dataset.uri: True for x in records} + dag_statuses[dag_id] = {x.asset.uri: True for x in records} ser_dags = session.scalars( select(SerializedDagModel).where(SerializedDagModel.dag_id.in_(dag_statuses.keys())) ).all() @@ -3105,27 +2330,27 @@ def dag_ready(dag_id: str, cond: BaseAsset, statuses: dict) -> bool | None: del by_dag[dag_id] del dag_statuses[dag_id] del dag_statuses - dataset_triggered_dag_info = {} + asset_triggered_dag_info = {} for dag_id, records in by_dag.items(): times = sorted(x.created_at for x in records) - dataset_triggered_dag_info[dag_id] = 
(times[0], times[-1]) + asset_triggered_dag_info[dag_id] = (times[0], times[-1]) del by_dag - dataset_triggered_dag_ids = set(dataset_triggered_dag_info.keys()) - if dataset_triggered_dag_ids: + asset_triggered_dag_ids = set(asset_triggered_dag_info.keys()) + if asset_triggered_dag_ids: exclusion_list = set( session.scalars( select(DagModel.dag_id) .join(DagRun.dag_model) .where(DagRun.state.in_((DagRunState.QUEUED, DagRunState.RUNNING))) - .where(DagModel.dag_id.in_(dataset_triggered_dag_ids)) + .where(DagModel.dag_id.in_(asset_triggered_dag_ids)) .group_by(DagModel.dag_id) .having(func.count() >= func.max(DagModel.max_active_runs)) ) ) if exclusion_list: - dataset_triggered_dag_ids -= exclusion_list - dataset_triggered_dag_info = { - k: v for k, v in dataset_triggered_dag_info.items() if k not in exclusion_list + asset_triggered_dag_ids -= exclusion_list + asset_triggered_dag_info = { + k: v for k, v in asset_triggered_dag_info.items() if k not in exclusion_list } # We limit so that _one_ scheduler doesn't try to do all the creation of dag runs @@ -3137,7 +2362,7 @@ def dag_ready(dag_id: str, cond: BaseAsset, statuses: dict) -> bool | None: cls.has_import_errors == expression.false(), or_( cls.next_dagrun_create_after <= func.now(), - cls.dag_id.in_(dataset_triggered_dag_ids), + cls.dag_id.in_(asset_triggered_dag_ids), ), ) .order_by(cls.next_dagrun_create_after) @@ -3146,7 +2371,7 @@ def dag_ready(dag_id: str, cond: BaseAsset, statuses: dict) -> bool | None: return ( session.scalars(with_row_locks(query, of=cls, session=session, skip_locked=True)), - dataset_triggered_dag_info, + asset_triggered_dag_info, ) def calculate_dagrun_date_fields( @@ -3186,7 +2411,7 @@ def calculate_dagrun_date_fields( @provide_session def get_asset_triggered_next_run_info(self, *, session=NEW_SESSION) -> dict[str, int | str] | None: - if self.dataset_expression is None: + if self.asset_expression is None: return None # When an asset alias does not resolve into assets, get_asset_triggered_next_run_info returns @@ -3194,123 +2419,6 @@ def get_asset_triggered_next_run_info(self, *, session=NEW_SESSION) -> dict[str, return get_asset_triggered_next_run_info([self.dag_id], session=session).get(self.dag_id, None) -# NOTE: Please keep the list of arguments in sync with DAG.__init__. -# Only exception: dag_id here should have a default value, but not in DAG. 
-def dag( - dag_id: str = "", - description: str | None = None, - schedule: ScheduleArg = None, - start_date: datetime | None = None, - end_date: datetime | None = None, - template_searchpath: str | Iterable[str] | None = None, - template_undefined: type[jinja2.StrictUndefined] = jinja2.StrictUndefined, - user_defined_macros: dict | None = None, - user_defined_filters: dict | None = None, - default_args: dict | None = None, - max_active_tasks: int = airflow_conf.getint("core", "max_active_tasks_per_dag"), - max_active_runs: int = airflow_conf.getint("core", "max_active_runs_per_dag"), - max_consecutive_failed_dag_runs: int = airflow_conf.getint( - "core", "max_consecutive_failed_dag_runs_per_dag" - ), - dagrun_timeout: timedelta | None = None, - sla_miss_callback: Any = None, - default_view: str = airflow_conf.get_mandatory_value("webserver", "dag_default_view").lower(), - orientation: str = airflow_conf.get_mandatory_value("webserver", "dag_orientation"), - catchup: bool = airflow_conf.getboolean("scheduler", "catchup_by_default"), - on_success_callback: None | DagStateChangeCallback | list[DagStateChangeCallback] = None, - on_failure_callback: None | DagStateChangeCallback | list[DagStateChangeCallback] = None, - doc_md: str | None = None, - params: abc.MutableMapping | None = None, - access_control: dict[str, dict[str, Collection[str]]] | dict[str, Collection[str]] | None = None, - is_paused_upon_creation: bool | None = None, - jinja_environment_kwargs: dict | None = None, - render_template_as_native_obj: bool = False, - tags: Collection[str] | None = None, - owner_links: dict[str, str] | None = None, - auto_register: bool = True, - fail_stop: bool = False, - dag_display_name: str | None = None, -) -> Callable[[Callable], Callable[..., DAG]]: - """ - Python dag decorator which wraps a function into an Airflow DAG. - - Accepts kwargs for operator kwarg. Can be used to parameterize DAGs. - - :param dag_args: Arguments for DAG object - :param dag_kwargs: Kwargs for DAG object. - """ - - def wrapper(f: Callable) -> Callable[..., DAG]: - @functools.wraps(f) - def factory(*args, **kwargs): - # Generate signature for decorated function and bind the arguments when called - # we do this to extract parameters, so we can annotate them on the DAG object. - # In addition, this fails if we are missing any args/kwargs with TypeError as expected. - f_sig = signature(f).bind(*args, **kwargs) - # Apply defaults to capture default values if set. 
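# Minimal stdlib sketch of the signature-binding step the decorator above relies on:
# bind the caller's arguments against the wrapped function's signature, apply defaults,
# and the resulting mapping is what gets turned into per-argument DAG params. The
# function and argument names below are illustrative only.
from inspect import signature


def example_dag_func(schedule_interval: str = "@daily", retries: int = 0):
    ...


bound = signature(example_dag_func).bind(retries=3)
bound.apply_defaults()
# Holds both the explicitly passed value and the captured default:
# {'schedule_interval': '@daily', 'retries': 3}
print(dict(bound.arguments))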
- f_sig.apply_defaults() - - # Initialize DAG with bound arguments - with DAG( - dag_id or f.__name__, - description=description, - start_date=start_date, - end_date=end_date, - template_searchpath=template_searchpath, - template_undefined=template_undefined, - user_defined_macros=user_defined_macros, - user_defined_filters=user_defined_filters, - default_args=default_args, - max_active_tasks=max_active_tasks, - max_active_runs=max_active_runs, - max_consecutive_failed_dag_runs=max_consecutive_failed_dag_runs, - dagrun_timeout=dagrun_timeout, - sla_miss_callback=sla_miss_callback, - default_view=default_view, - orientation=orientation, - catchup=catchup, - on_success_callback=on_success_callback, - on_failure_callback=on_failure_callback, - doc_md=doc_md, - params=params, - access_control=access_control, - is_paused_upon_creation=is_paused_upon_creation, - jinja_environment_kwargs=jinja_environment_kwargs, - render_template_as_native_obj=render_template_as_native_obj, - tags=tags, - schedule=schedule, - owner_links=owner_links, - auto_register=auto_register, - fail_stop=fail_stop, - dag_display_name=dag_display_name, - ) as dag_obj: - # Set DAG documentation from function documentation if it exists and doc_md is not set. - if f.__doc__ and not dag_obj.doc_md: - dag_obj.doc_md = f.__doc__ - - # Generate DAGParam for each function arg/kwarg and replace it for calling the function. - # All args/kwargs for function will be DAGParam object and replaced on execution time. - f_kwargs = {} - for name, value in f_sig.arguments.items(): - f_kwargs[name] = dag_obj.param(name, value) - - # set file location to caller source path - back = sys._getframe().f_back - dag_obj.fileloc = back.f_code.co_filename if back else "" - - # Invoke function to create operators in the DAG scope. - f(**f_kwargs) - - # Return dag object such that it's accessible in Globals. - return dag_obj - - # Ensure that warnings from inside DAG() are emitted from the caller, not here - fixup_decorator_warning_stack(factory) - return factory - - return wrapper - - STATICA_HACK = True globals()["kcah_acitats"[::-1].upper()] = False if STATICA_HACK: # pragma: no cover @@ -3320,54 +2428,6 @@ def factory(*args, **kwargs): """:sphinx-autoapi-skip:""" -class DagContext: - """ - DAG context is used to keep the current DAG when DAG is used as ContextManager. - - You can use DAG as context: - - .. code-block:: python - - with DAG( - dag_id="example_dag", - default_args=default_args, - schedule="0 0 * * *", - dagrun_timeout=timedelta(minutes=60), - ) as dag: - ... - - If you do this the context stores the DAG and whenever new task is created, it will use - such stored DAG as the parent DAG. 
- - """ - - _context_managed_dags: deque[DAG] = deque() - autoregistered_dags: set[tuple[DAG, ModuleType]] = set() - current_autoregister_module_name: str | None = None - - @classmethod - def push_context_managed_dag(cls, dag: DAG): - cls._context_managed_dags.appendleft(dag) - - @classmethod - def pop_context_managed_dag(cls) -> DAG | None: - dag = cls._context_managed_dags.popleft() - - # In a few cases around serialization we explicitly push None in to the stack - if cls.current_autoregister_module_name is not None and dag and dag.auto_register: - mod = sys.modules[cls.current_autoregister_module_name] - cls.autoregistered_dags.add((dag, mod)) - - return dag - - @classmethod - def get_current_dag(cls) -> DAG | None: - try: - return cls._context_managed_dags[0] - except IndexError: - return None - - def _run_inline_trigger(trigger): async def _run_inline_trigger_main(): async for event in trigger.run(): diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index b2d45a133187e..c9ad8edaa4018 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -245,7 +245,9 @@ def get_dag(self, dag_id, session: Session = None): # If the dag corresponding to root_dag_id is absent or expired is_missing = root_dag_id not in self.dags - is_expired = orm_dag.last_expired and dag and dag.last_loaded < orm_dag.last_expired + is_expired = ( + orm_dag.last_expired and dag and dag.last_loaded and dag.last_loaded < orm_dag.last_expired + ) if is_expired: # Remove associated dags so we can re-add them. self.dags = {key: dag for key, dag in self.dags.items()} @@ -278,7 +280,7 @@ def _add_dag_from_db(self, dag_id: str, session: Session): def process_file(self, filepath, only_if_updated=True, safe_mode=True): """Given a path to a python module or zip file, import the module and look for dag objects within.""" - from airflow.models.dag import DagContext + from airflow.sdk.definitions.contextmanager import DagContext # if the source file no longer exists in the DB or in the filesystem, # return an empty list @@ -326,7 +328,7 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): return found_dags def _load_modules_from_file(self, filepath, safe_mode): - from airflow.models.dag import DagContext + from airflow.sdk.definitions.contextmanager import DagContext if not might_contain_dag(filepath, safe_mode): # Don't want to spam user with skip messages @@ -382,7 +384,7 @@ def parse(mod_name, filepath): return parse(mod_name, filepath) def _load_modules_from_zip(self, filepath, safe_mode): - from airflow.models.dag import DagContext + from airflow.sdk.definitions.contextmanager import DagContext mods = [] with zipfile.ZipFile(filepath) as current_zip_file: @@ -431,7 +433,8 @@ def _load_modules_from_zip(self, filepath, safe_mode): return mods def _process_modules(self, filepath, mods, file_last_changed_on_disk): - from airflow.models.dag import DAG, DagContext # Avoid circular import + from airflow.models.dag import DAG # Avoid circular import + from airflow.sdk.definitions.contextmanager import DagContext top_level_dags = {(o, m) for m in mods for o in m.__dict__.values() if isinstance(o, DAG)} diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 20ec12b9915e7..5de0466a6be0e 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -91,7 +91,7 @@ CreatedTasks = TypeVar("CreatedTasks", Iterator["dict[str, Any]"], Iterator[TI]) -RUN_ID_REGEX = r"^(?:manual|scheduled|dataset_triggered)__(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00)$" +RUN_ID_REGEX = 
r"^(?:manual|scheduled|asset_triggered)__(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00)$" class TISchedulingDecision(NamedTuple): diff --git a/airflow/models/dagwarning.py b/airflow/models/dagwarning.py index ffab515f85495..e0c271c4c8ec2 100644 --- a/airflow/models/dagwarning.py +++ b/airflow/models/dagwarning.py @@ -104,4 +104,5 @@ class DagWarningType(str, Enum): in the DagWarning model. """ + ASSET_CONFLICT = "asset conflict" NONEXISTENT_POOL = "non-existent pool" diff --git a/airflow/models/mappedoperator.py b/airflow/models/mappedoperator.py index 8a9e790ea7fc6..925acfc16f0c1 100644 --- a/airflow/models/mappedoperator.py +++ b/airflow/models/mappedoperator.py @@ -201,8 +201,8 @@ def _expand(self, expand_input: ExpandInput, *, strict: bool) -> MappedOperator: task_id = partial_kwargs.pop("task_id") dag = partial_kwargs.pop("dag") task_group = partial_kwargs.pop("task_group") - start_date = partial_kwargs.pop("start_date") - end_date = partial_kwargs.pop("end_date") + start_date = partial_kwargs.pop("start_date", None) + end_date = partial_kwargs.pop("end_date", None) try: operator_name = self.operator_class.custom_operator_name # type: ignore @@ -333,7 +333,8 @@ def __attrs_post_init__(self): @classmethod def get_serialized_fields(cls): # Not using 'cls' here since we only want to serialize base fields. - return frozenset(attr.fields_dict(MappedOperator)) - { + return (frozenset(attr.fields_dict(MappedOperator)) | {"task_type"}) - { + "_task_type", "dag", "deps", "expand_input", # This is needed to be able to accept XComArg. diff --git a/airflow/models/param.py b/airflow/models/param.py index 895cd2af8bb42..28253a86f5ca6 100644 --- a/airflow/models/param.py +++ b/airflow/models/param.py @@ -27,9 +27,9 @@ from airflow.utils.types import NOTSET, ArgNotSet if TYPE_CHECKING: - from airflow.models.dag import DAG from airflow.models.dagrun import DagRun from airflow.models.operator import Operator + from airflow.sdk import DAG from airflow.serialization.pydantic.dag_run import DagRunPydantic from airflow.utils.context import Context diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py index 12cbdb380b920..a67c7cf310ba4 100644 --- a/airflow/models/skipmixin.py +++ b/airflow/models/skipmixin.py @@ -36,7 +36,7 @@ from airflow.models.dagrun import DagRun from airflow.models.operator import Operator - from airflow.models.taskmixin import DAGNode + from airflow.sdk.definitions.node import DAGNode from airflow.serialization.pydantic.dag_run import DagRunPydantic from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic diff --git a/airflow/models/taskfail.py b/airflow/models/taskfail.py deleted file mode 100644 index 1bf7db7a11ac3..0000000000000 --- a/airflow/models/taskfail.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -"""Taskfail tracks the failed run durations of each task instance.""" - -from __future__ import annotations - -from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, text -from sqlalchemy.orm import relationship - -from airflow.models.base import StringID, TaskInstanceDependencies -from airflow.utils.sqlalchemy import UtcDateTime - - -class TaskFail(TaskInstanceDependencies): - """TaskFail tracks the failed run durations of each task instance.""" - - __tablename__ = "task_fail" - - id = Column(Integer, primary_key=True) - task_id = Column(StringID(), nullable=False) - dag_id = Column(StringID(), nullable=False) - run_id = Column(StringID(), nullable=False) - map_index = Column(Integer, nullable=False, server_default=text("-1")) - start_date = Column(UtcDateTime) - end_date = Column(UtcDateTime) - duration = Column(Integer) - - __table_args__ = ( - Index("idx_task_fail_task_instance", dag_id, task_id, run_id, map_index), - ForeignKeyConstraint( - [dag_id, task_id, run_id, map_index], - [ - "task_instance.dag_id", - "task_instance.task_id", - "task_instance.run_id", - "task_instance.map_index", - ], - name="task_fail_ti_fkey", - ondelete="CASCADE", - ), - ) - - # We don't need a DB level FK here, as we already have that to TI (which has one to DR) but by defining - # the relationship we can more easily find the execution date for these rows - dag_run = relationship( - "DagRun", - primaryjoin="""and_( - TaskFail.dag_id == foreign(DagRun.dag_id), - TaskFail.run_id == foreign(DagRun.run_id), - )""", - viewonly=True, - ) - - def __init__(self, ti): - self.dag_id = ti.dag_id - self.task_id = ti.task_id - self.run_id = ti.run_id - self.map_index = ti.map_index - self.start_date = ti.start_date - self.end_date = ti.end_date - if self.end_date and self.start_date: - self.duration = int((self.end_date - self.start_date).total_seconds()) - else: - self.duration = None - - def __repr__(self): - prefix = f"<{self.__class__.__name__}: {self.dag_id}.{self.task_id} {self.run_id}" - if self.map_index != -1: - prefix += f" map_index={self.map_index}" - return prefix + ">" diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 1dbe299f25b39..e86c47778246e 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -39,6 +39,7 @@ import jinja2 import lazy_object_proxy import pendulum +import uuid6 from jinja2 import TemplateAssertionError, UndefinedError from sqlalchemy import ( Column, @@ -50,6 +51,7 @@ PrimaryKeyConstraint, String, Text, + UniqueConstraint, and_, delete, false, @@ -59,6 +61,7 @@ text, update, ) +from sqlalchemy.dialects import postgresql from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.ext.mutable import MutableDict @@ -93,7 +96,6 @@ from airflow.models.log import Log from airflow.models.param import process_params from airflow.models.renderedtifields import get_serialized_template_fields -from airflow.models.taskfail import TaskFail from airflow.models.taskinstancekey import TaskInstanceKey from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule @@ -131,7 +133,7 @@ tuple_in_condition, with_row_locks, ) -from airflow.utils.state import DagRunState, JobState, State, TaskInstanceState +from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.task_group import 
MappedTaskGroup from airflow.utils.task_instance_session import set_current_task_instance_session from airflow.utils.timeout import timeout @@ -155,9 +157,10 @@ from airflow.models.abstractoperator import TaskStateChangeCallback from airflow.models.baseoperator import BaseOperator - from airflow.models.dag import DAG, DagModel + from airflow.models.dag import DAG as SchedulerDAG, DagModel from airflow.models.dagrun import DagRun from airflow.models.operator import Operator + from airflow.sdk import DAG from airflow.serialization.pydantic.asset import AssetEventPydantic from airflow.serialization.pydantic.dag import DagModelPydantic from airflow.serialization.pydantic.taskinstance import TaskInstancePydantic @@ -218,11 +221,16 @@ def _add_log( ) +@internal_api_call +@provide_session +def _update_ti_heartbeat(id: str, when: datetime, session: Session = NEW_SESSION): + session.execute(update(TaskInstance).where(TaskInstance.id == id).values(last_heartbeat_at=when)) + + def _run_raw_task( ti: TaskInstance | TaskInstancePydantic, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session | None = None, @@ -246,7 +254,6 @@ def _run_raw_task( ti.test_mode = test_mode ti.refresh_from_task(ti.task, pool_override=pool) ti.refresh_from_db(session=session) - ti.job_id = job_id ti.hostname = get_hostname() ti.pid = os.getpid() if not test_mode: @@ -448,7 +455,6 @@ def clear_task_instances( If set to False, DagRuns state will not be changed. :param dag: DAG object """ - job_ids = [] # Keys: dag_id -> run_id -> map_indexes -> try_numbers -> task_id task_id_by_key: dict[str, dict[str, dict[int, dict[int, set[str]]]]] = defaultdict( lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(set))) @@ -459,11 +465,9 @@ def clear_task_instances( for ti in tis: TaskInstanceHistory.record_ti(ti, session) if ti.state == TaskInstanceState.RUNNING: - if ti.job_id: - # If a task is cleared when running, set its state to RESTARTING so that - # the task is terminated and becomes eligible for retry. - ti.state = TaskInstanceState.RESTARTING - job_ids.append(ti.job_id) + # If a task is cleared when running, set its state to RESTARTING so that + # the task is terminated and becomes eligible for retry. 
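# Sketch of the single-column heartbeat write added above (_update_ti_heartbeat), using
# a throwaway SQLite table instead of the task_instance model; the table and column
# names are illustrative only.
from datetime import datetime, timezone

from sqlalchemy import Column, DateTime, String, create_engine, select, update
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class HeartbeatDemo(Base):
    __tablename__ = "heartbeat_demo"
    id = Column(String(36), primary_key=True)
    last_heartbeat_at = Column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(HeartbeatDemo(id="ti-1"))
    session.commit()
    # The interesting part: an UPDATE scoped to one row by primary key that writes only
    # the heartbeat timestamp, cheap enough to run on every heartbeat tick.
    session.execute(
        update(HeartbeatDemo)
        .where(HeartbeatDemo.id == "ti-1")
        .values(last_heartbeat_at=datetime.now(timezone.utc))
    )
    session.commit()
    print(session.scalar(select(HeartbeatDemo.last_heartbeat_at).where(HeartbeatDemo.id == "ti-1")))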
+ ti.state = TaskInstanceState.RESTARTING else: ti_dag = dag if dag and dag.dag_id == ti.dag_id else dag_bag.get_dag(ti.dag_id, session=session) task_id = ti.task_id @@ -519,11 +523,6 @@ def clear_task_instances( delete_qry = TR.__table__.delete().where(conditions) session.execute(delete_qry) - if job_ids: - from airflow.jobs.job import Job - - session.execute(update(Job).where(Job.id.in_(job_ids)).values(state=JobState.RESTARTING)) - if dag_run_state is not False and tis: from airflow.models.dagrun import DagRun # Avoid circular import @@ -794,6 +793,7 @@ def _execute_callable(context: Context, **execute_callable_kwargs): def _set_ti_attrs(target, source, include_dag_run=False): # Fields ordered per model definition + target.id = source.id target.start_date = source.start_date target.end_date = source.end_date target.duration = source.duration @@ -802,7 +802,6 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.max_tries = source.max_tries target.hostname = source.hostname target.unixname = source.unixname - target.job_id = source.job_id target.pool = source.pool target.pool_slots = source.pool_slots or 1 target.queue = source.queue @@ -811,6 +810,7 @@ def _set_ti_attrs(target, source, include_dag_run=False): target.custom_operator_name = source.custom_operator_name target.queued_dttm = source.queued_dttm target.queued_by_job_id = source.queued_by_job_id + target.last_heartbeat_at = source.last_heartbeat_at target.pid = source.pid target.executor = source.executor target.executor_config = source.executor_config @@ -928,7 +928,7 @@ def _clear_next_method_args(*, task_instance: TaskInstance | TaskInstancePydanti def _get_template_context( *, task_instance: TaskInstance | TaskInstancePydantic, - dag: DAG, + dag: SchedulerDAG, session: Session | None = None, ignore_param_exceptions: bool = True, ) -> Context: @@ -958,7 +958,8 @@ def _get_template_context( assert task.dag if task.dag.__class__ is AttributeRemoved: - task.dag = dag # required after deserialization + # TODO: Task-SDK: Remove this after AIP-44 code is removed + task.dag = dag # type: ignore[assignment] # required after deserialization dag_run = task_instance.get_dagrun(session) data_interval = dag.get_run_data_interval(dag_run) @@ -1086,11 +1087,11 @@ def get_triggering_events() -> dict[str, list[AssetEvent | AssetEventPydantic]]: nonlocal dag_run if dag_run not in session: dag_run = session.merge(dag_run, load=False) - asset_events = dag_run.consumed_dataset_events + asset_events = dag_run.consumed_asset_events triggering_events: dict[str, list[AssetEvent | AssetEventPydantic]] = defaultdict(list) for event in asset_events: - if event.dataset: - triggering_events[event.dataset.uri].append(event) + if event.asset: + triggering_events[event.asset.uri].append(event) return triggering_events @@ -1316,8 +1317,10 @@ def _record_task_map_for_downstreams( """ from airflow.models.mappedoperator import MappedOperator + # TODO: Task-SDK: Remove this after AIP-44 code is removed if task.dag.__class__ is AttributeRemoved: - task.dag = dag # required after deserialization + # required after deserialization + task.dag = dag # type: ignore[assignment] if next(task.iter_mapped_dependants(), None) is None: # No mapped dependants, no need to validate. return @@ -1793,6 +1796,11 @@ def _handle_reschedule( return ti +def uuid7() -> str: + """Generate a new UUID7 string.""" + return str(uuid6.uuid7()) + + class TaskInstance(Base, LoggingMixin): """ Task instances store the state of a task instance. 
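# Small sketch of why the new UUIDv7 primary key (the uuid7 helper just above) is
# convenient: ids produced by the uuid6 package embed a millisecond timestamp in their
# most significant bits, so later task instance rows sort after earlier ones even as
# plain strings.
import time

import uuid6

first = str(uuid6.uuid7())
time.sleep(0.002)  # make sure the two ids land in different milliseconds
second = str(uuid6.uuid7())

print(first, second)
assert first < second  # lexicographic order follows creation order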
@@ -1813,10 +1821,16 @@ class TaskInstance(Base, LoggingMixin): """ __tablename__ = "task_instance" - task_id = Column(StringID(), primary_key=True, nullable=False) - dag_id = Column(StringID(), primary_key=True, nullable=False) - run_id = Column(StringID(), primary_key=True, nullable=False) - map_index = Column(Integer, primary_key=True, nullable=False, server_default=text("-1")) + id = Column( + String(36).with_variant(postgresql.UUID(as_uuid=False), "postgresql"), + primary_key=True, + default=uuid7, + nullable=False, + ) + task_id = Column(StringID(), nullable=False) + dag_id = Column(StringID(), nullable=False) + run_id = Column(StringID(), nullable=False) + map_index = Column(Integer, nullable=False, server_default=text("-1")) start_date = Column(UtcDateTime) end_date = Column(UtcDateTime) @@ -1826,7 +1840,6 @@ class TaskInstance(Base, LoggingMixin): max_tries = Column(Integer, server_default=text("-1")) hostname = Column(String(1000)) unixname = Column(String(1000)) - job_id = Column(Integer) pool = Column(String(256), nullable=False) pool_slots = Column(Integer, default=1, nullable=False) queue = Column(String(256)) @@ -1835,6 +1848,8 @@ class TaskInstance(Base, LoggingMixin): custom_operator_name = Column(String(1000)) queued_dttm = Column(UtcDateTime) queued_by_job_id = Column(Integer) + + last_heartbeat_at = Column(UtcDateTime) pid = Column(Integer) executor = Column(String(1000)) executor_config = Column(ExecutorConfigType(pickler=dill)) @@ -1867,9 +1882,10 @@ class TaskInstance(Base, LoggingMixin): Index("ti_state", state), Index("ti_state_lkp", dag_id, task_id, run_id, state), Index("ti_pool", pool, state, priority_weight), - Index("ti_job_id", job_id), Index("ti_trigger_id", trigger_id), - PrimaryKeyConstraint("dag_id", "task_id", "run_id", "map_index", name="task_instance_pkey"), + Index("ti_heartbeat", last_heartbeat_at), + PrimaryKeyConstraint("id", name="task_instance_pkey"), + UniqueConstraint("dag_id", "task_id", "run_id", "map_index", name="task_instance_composite_key"), ForeignKeyConstraint( [trigger_id], ["trigger.id"], @@ -1938,6 +1954,8 @@ def __init__( self.run_id = run_id self.try_number = 0 self.max_tries = self.task.retries + if not self.id: + self.id = uuid7() self.unixname = getuser() if state: self.state = state @@ -2014,7 +2032,6 @@ def _command_as_list( local: bool = False, pickle_id: int | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -2053,7 +2070,6 @@ def _command_as_list( pickle_id=pickle_id, file_path=path, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, map_index=ti.map_index, @@ -2070,7 +2086,6 @@ def command_as_list( local: bool = False, pickle_id: int | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -2090,7 +2105,6 @@ def command_as_list( local=local, pickle_id=pickle_id, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, ) @@ -2110,7 +2124,6 @@ def generate_command( pickle_id: int | None = None, file_path: PurePath | str | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, map_index: int = -1, @@ -2135,7 +2148,6 @@ def generate_command( associated with the pickled DAG :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) - :param job_id: job ID (needs more details) :param pool: the Airflow pool that the task should run in :param cfg_path: the 
Path to the configuration file :return: shell command that can be used to run the task instance @@ -2145,8 +2157,6 @@ def generate_command( cmd.extend(["--mark-success"]) if pickle_id: cmd.extend(["--pickle", str(pickle_id)]) - if job_id: - cmd.extend(["--job-id", str(job_id)]) if ignore_all_deps: cmd.extend(["--ignore-all-dependencies"]) if ignore_task_deps: @@ -2620,7 +2630,6 @@ def _check_and_change_state_before_execution( mark_success: bool = False, test_mode: bool = False, hostname: str = "", - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session = NEW_SESSION, @@ -2640,7 +2649,6 @@ def _check_and_change_state_before_execution( :param mark_success: Don't run the task, mark its state as success :param test_mode: Doesn't record success or failure in the DB :param hostname: The hostname of the worker running the task instance. - :param job_id: Job (LocalTaskJob / SchedulerJob) ID :param pool: specifies the pool to use to run the task instance :param external_executor_id: The identifier of the celery executor :param session: SQLAlchemy ORM Session @@ -2663,7 +2671,6 @@ def _check_and_change_state_before_execution( ti.refresh_from_task(task, pool_override=pool) ti.test_mode = test_mode ti.refresh_from_db(session=session, lock_for_update=True) - ti.job_id = job_id ti.hostname = hostname ti.pid = None @@ -2768,7 +2775,6 @@ def check_and_change_state_before_execution( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session = NEW_SESSION, @@ -2784,7 +2790,6 @@ def check_and_change_state_before_execution( mark_success=mark_success, test_mode=test_mode, hostname=get_hostname(), - job_id=job_id, pool=pool, external_executor_id=external_executor_id, session=session, @@ -2855,7 +2860,6 @@ def _run_raw_task( self, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session = NEW_SESSION, @@ -2880,7 +2884,6 @@ def _run_raw_task( ti=self, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, raise_on_defer=raise_on_defer, session=session, @@ -2911,22 +2914,22 @@ def _register_asset_changes(self, *, events: OutletEventAccessors, session: Sess frozen_extra = frozenset(asset_alias_event["extra"].items()) asset_alias_names[(asset_uri, frozen_extra)].add(asset_alias_name) - dataset_models: dict[str, AssetModel] = { - dataset_obj.uri: dataset_obj - for dataset_obj in session.scalars( + asset_models: dict[str, AssetModel] = { + asset_obj.uri: asset_obj + for asset_obj in session.scalars( select(AssetModel).where(AssetModel.uri.in_(uri for uri, _ in asset_alias_names)) ) } - if missing_datasets := [Asset(uri=u) for u, _ in asset_alias_names if u not in dataset_models]: - dataset_models.update( - (dataset_obj.uri, dataset_obj) - for dataset_obj in asset_manager.create_assets(missing_datasets, session=session) + if missing_assets := [Asset(uri=u) for u, _ in asset_alias_names if u not in asset_models]: + asset_models.update( + (asset_obj.uri, asset_obj) + for asset_obj in asset_manager.create_assets(missing_assets, session=session) ) - self.log.warning("Created new datasets for alias reference: %s", missing_datasets) + self.log.warning("Created new assets for alias reference: %s", missing_assets) session.flush() # Needed because we need the id for fk. 
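# Generic sketch of the get-or-create-then-flush pattern used above (plain SQLAlchemy,
# not Airflow's asset_manager): load existing rows keyed by their natural key, create
# whatever is missing, then flush so the new rows receive database ids before anything
# needs to reference them by foreign key. The model and URIs below are illustrative only.
from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class AssetDemo(Base):
    __tablename__ = "asset_demo"
    id = Column(Integer, primary_key=True)
    uri = Column(String, unique=True, nullable=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

wanted = {"s3://bucket/a", "s3://bucket/b"}
with Session(engine) as session:
    existing = {
        row.uri: row for row in session.scalars(select(AssetDemo).where(AssetDemo.uri.in_(wanted)))
    }
    missing = [AssetDemo(uri=u) for u in wanted if u not in existing]
    session.add_all(missing)
    session.flush()  # assigns primary keys, so foreign keys can now point at the new rows
    existing.update((row.uri, row) for row in missing)
    print({uri: row.id for uri, row in existing.items()})
    session.commit()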
for (uri, extra_items), alias_names in asset_alias_names.items(): - asset_obj = dataset_models[uri] + asset_obj = asset_models[uri] self.log.info( 'Creating event for %r through aliases "%s"', asset_obj, @@ -2935,7 +2938,7 @@ def _register_asset_changes(self, *, events: OutletEventAccessors, session: Sess asset_manager.register_asset_change( task_instance=self, asset=asset_obj, - aliases=[AssetAlias(name) for name in alias_names], + aliases=[AssetAlias(name=name) for name in alias_names], extra=dict(extra_items), session=session, source_alias_names=alias_names, @@ -3050,6 +3053,11 @@ def _execute_task(self, context: Context, task_orig: Operator): """ return _execute_task(self, context, task_orig) + def update_heartbeat(self): + cm = nullcontext() if InternalApiConfig.get_use_internal_api() else create_session() + with cm as session_or_null: + _update_ti_heartbeat(self.id, timezone.utcnow(), session_or_null) + @provide_session def defer_task(self, exception: TaskDeferred | None, session: Session = NEW_SESSION) -> None: """ @@ -3080,7 +3088,6 @@ def run( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, session: Session = NEW_SESSION, raise_on_defer: bool = False, @@ -3095,7 +3102,6 @@ def run( ignore_ti_state=ignore_ti_state, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, session=session, ) @@ -3105,7 +3111,6 @@ def run( self._run_raw_task( mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, session=session, raise_on_defer=raise_on_defer, @@ -3169,9 +3174,15 @@ def fetch_handle_failure_context( fail_stop: bool = False, ): """ - Handle Failure for the TaskInstance. + Fetch the context needed to handle a failure. - :param fail_stop: if true, stop remaining tasks in dag + :param ti: TaskInstance + :param error: if specified, log the specific exception if thrown + :param test_mode: doesn't record success or failure in the DB if True + :param context: Jinja2 context + :param force_fail: if True, task does not retry + :param session: SQLAlchemy ORM Session + :param fail_stop: if True, fail all downstream tasks """ if error: if isinstance(error, BaseException): @@ -3193,9 +3204,6 @@ def fetch_handle_failure_context( if not test_mode: session.add(Log(TaskInstanceState.FAILED.value, ti)) - # Log failure duration - session.add(TaskFail(ti=ti)) - ti.clear_next_method_args() # In extreme cases (zombie in case of dag with parse error) we might _not_ have a Task. @@ -3673,21 +3681,15 @@ def _schedule_downstream_tasks( assert task assert task.dag - # Get a partial DAG with just the specific tasks we want to examine. - # In order for dep checks to work correctly, we include ourself (so - # TriggerRuleDep can check the state of the task we just executed). - partial_dag = task.dag.partial_subset( - task.downstream_task_ids, - include_downstream=True, - include_upstream=False, - include_direct_upstream=True, - ) - - dag_run.dag = partial_dag + # Previously, this section used task.dag.partial_subset to retrieve a partial DAG. + # However, this approach is unsafe as it can result in incomplete or incorrect task execution, + # leading to potential bad cases. As a result, the operation has been removed. + # For more details, refer to the discussion in PR #[https://github.com/apache/airflow/pull/42582]. 
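# Stdlib sketch of the "real session or nullcontext" idiom used by update_heartbeat
# above: when the call will be routed through the internal API (modelled here by a
# plain boolean flag), enter a no-op context that yields None; otherwise open a real
# resource. open_session below is a hypothetical stand-in for a session factory.
from contextlib import contextmanager, nullcontext


@contextmanager
def open_session():
    print("session opened")
    try:
        yield "session"
    finally:
        print("session closed")


def do_work(use_remote_api: bool) -> None:
    cm = nullcontext() if use_remote_api else open_session()
    with cm as session_or_none:
        # the callee decides what to do when it receives None instead of a session
        print("working with:", session_or_none)


do_work(use_remote_api=True)   # no session is opened; session_or_none is None
do_work(use_remote_api=False)  # a real session is opened and closed around the work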
+ dag_run.dag = task.dag info = dag_run.task_instance_scheduling_decisions(session) skippable_task_ids = { - task_id for task_id in partial_dag.task_ids if task_id not in task.downstream_task_ids + task_id for task_id in task.dag.task_ids if task_id not in task.downstream_task_ids } schedulable_tis = [ @@ -3837,7 +3839,6 @@ def clear_db_references(self, session: Session): from airflow.models.renderedtifields import RenderedTaskInstanceFields tables: list[type[TaskInstanceDependencies]] = [ - TaskFail, TaskInstanceNote, TaskReschedule, XCom, diff --git a/airflow/models/taskinstancehistory.py b/airflow/models/taskinstancehistory.py index ccdca700af6e9..8c77daf925793 100644 --- a/airflow/models/taskinstancehistory.py +++ b/airflow/models/taskinstancehistory.py @@ -70,7 +70,6 @@ class TaskInstanceHistory(Base): max_tries = Column(Integer, server_default=text("-1")) hostname = Column(String(1000)) unixname = Column(String(1000)) - job_id = Column(Integer) pool = Column(String(256), nullable=False) pool_slots = Column(Integer, default=1, nullable=False) queue = Column(String(256)) diff --git a/airflow/models/taskinstancekey.py b/airflow/models/taskinstancekey.py index d6e647f598809..7261c5dbe8f78 100644 --- a/airflow/models/taskinstancekey.py +++ b/airflow/models/taskinstancekey.py @@ -34,14 +34,6 @@ def primary(self) -> tuple[str, str, str, int]: """Return task instance primary key part of the key.""" return self.dag_id, self.task_id, self.run_id, self.map_index - @property - def reduced(self) -> TaskInstanceKey: - """Remake the key by subtracting 1 from try number to match in memory information.""" - # todo (dstandish): remove this property - return TaskInstanceKey( - self.dag_id, self.task_id, self.run_id, max(1, self.try_number - 1), self.map_index - ) - def with_try_number(self, try_number: int) -> TaskInstanceKey: """Return TaskInstanceKey with provided ``try_number``.""" return TaskInstanceKey(self.dag_id, self.task_id, self.run_id, try_number, self.map_index) diff --git a/airflow/models/taskmixin.py b/airflow/models/taskmixin.py index 05768ff36fe13..fa76a3815cb8e 100644 --- a/airflow/models/taskmixin.py +++ b/airflow/models/taskmixin.py @@ -16,271 +16,13 @@ # under the License. from __future__ import annotations -from abc import ABCMeta, abstractmethod -from typing import TYPE_CHECKING, Any, Iterable, Sequence - -from airflow.exceptions import AirflowException -from airflow.utils.types import NOTSET +from typing import TYPE_CHECKING if TYPE_CHECKING: - from logging import Logger - - import pendulum - - from airflow.models.baseoperator import BaseOperator - from airflow.models.dag import DAG - from airflow.models.operator import Operator - from airflow.serialization.enums import DagAttributeTypes - from airflow.utils.edgemodifier import EdgeModifier - from airflow.utils.task_group import TaskGroup - from airflow.utils.types import ArgNotSet - - -class DependencyMixin: - """Mixing implementing common dependency setting methods like >> and <<.""" - - @property - def roots(self) -> Sequence[DependencyMixin]: - """ - List of root nodes -- ones with no upstream dependencies. - - a.k.a. the "start" of this sub-graph - """ - raise NotImplementedError() - - @property - def leaves(self) -> Sequence[DependencyMixin]: - """ - List of leaf nodes -- ones with only upstream dependencies. - - a.k.a. 
the "end" of this sub-graph - """ - raise NotImplementedError() - - @abstractmethod - def set_upstream( - self, other: DependencyMixin | Sequence[DependencyMixin], edge_modifier: EdgeModifier | None = None - ): - """Set a task or a task list to be directly upstream from the current task.""" - raise NotImplementedError() - - @abstractmethod - def set_downstream( - self, other: DependencyMixin | Sequence[DependencyMixin], edge_modifier: EdgeModifier | None = None - ): - """Set a task or a task list to be directly downstream from the current task.""" - raise NotImplementedError() - - def as_setup(self) -> DependencyMixin: - """Mark a task as setup task.""" - raise NotImplementedError() - - def as_teardown( - self, - *, - setups: BaseOperator | Iterable[BaseOperator] | ArgNotSet = NOTSET, - on_failure_fail_dagrun=NOTSET, - ) -> DependencyMixin: - """Mark a task as teardown and set its setups as direct relatives.""" - raise NotImplementedError() - - def update_relative( - self, other: DependencyMixin, upstream: bool = True, edge_modifier: EdgeModifier | None = None - ) -> None: - """ - Update relationship information about another DependencyMixin. Default is no-op. - - Override if necessary. - """ - - def __lshift__(self, other: DependencyMixin | Sequence[DependencyMixin]): - """Implement Task << Task.""" - self.set_upstream(other) - return other - - def __rshift__(self, other: DependencyMixin | Sequence[DependencyMixin]): - """Implement Task >> Task.""" - self.set_downstream(other) - return other - - def __rrshift__(self, other: DependencyMixin | Sequence[DependencyMixin]): - """Implement Task >> [Task] because list don't have __rshift__ operators.""" - self.__lshift__(other) - return self - - def __rlshift__(self, other: DependencyMixin | Sequence[DependencyMixin]): - """Implement Task << [Task] because list don't have __lshift__ operators.""" - self.__rshift__(other) - return self - - @classmethod - def _iter_references(cls, obj: Any) -> Iterable[tuple[DependencyMixin, str]]: - from airflow.models.baseoperator import AbstractOperator - from airflow.utils.mixins import ResolveMixin - - if isinstance(obj, AbstractOperator): - yield obj, "operator" - elif isinstance(obj, ResolveMixin): - yield from obj.iter_references() - elif isinstance(obj, Sequence): - for o in obj: - yield from cls._iter_references(o) - - -class DAGNode(DependencyMixin, metaclass=ABCMeta): - """ - A base class for a node in the graph of a workflow. - - A node may be an Operator or a Task Group, either mapped or unmapped. 
- """ - - dag: DAG | None = None - task_group: TaskGroup | None = None - """The task_group that contains this node""" - - @property - @abstractmethod - def node_id(self) -> str: - raise NotImplementedError() - - @property - def label(self) -> str | None: - tg = self.task_group - if tg and tg.node_id and tg.prefix_group_id: - # "task_group_id.task_id" -> "task_id" - return self.node_id[len(tg.node_id) + 1 :] - return self.node_id - - start_date: pendulum.DateTime | None - end_date: pendulum.DateTime | None - upstream_task_ids: set[str] - downstream_task_ids: set[str] - - def has_dag(self) -> bool: - return self.dag is not None - - @property - def dag_id(self) -> str: - """Returns dag id if it has one or an adhoc/meaningless ID.""" - if self.dag: - return self.dag.dag_id - return "_in_memory_dag_" - - @property - def log(self) -> Logger: - raise NotImplementedError() - - @property - @abstractmethod - def roots(self) -> Sequence[DAGNode]: - raise NotImplementedError() - - @property - @abstractmethod - def leaves(self) -> Sequence[DAGNode]: - raise NotImplementedError() - - def _set_relatives( - self, - task_or_task_list: DependencyMixin | Sequence[DependencyMixin], - upstream: bool = False, - edge_modifier: EdgeModifier | None = None, - ) -> None: - """Set relatives for the task or task list.""" - from airflow.models.baseoperator import BaseOperator - from airflow.models.mappedoperator import MappedOperator - - if not isinstance(task_or_task_list, Sequence): - task_or_task_list = [task_or_task_list] - - task_list: list[Operator] = [] - for task_object in task_or_task_list: - task_object.update_relative(self, not upstream, edge_modifier=edge_modifier) - relatives = task_object.leaves if upstream else task_object.roots - for task in relatives: - if not isinstance(task, (BaseOperator, MappedOperator)): - raise AirflowException( - f"Relationships can only be set between Operators; received {task.__class__.__name__}" - ) - task_list.append(task) - - # relationships can only be set if the tasks share a single DAG. Tasks - # without a DAG are assigned to that DAG. - dags: set[DAG] = {task.dag for task in [*self.roots, *task_list] if task.has_dag() and task.dag} - - if len(dags) > 1: - raise AirflowException(f"Tried to set relationships between tasks in more than one DAG: {dags}") - elif len(dags) == 1: - dag = dags.pop() - else: - raise AirflowException( - f"Tried to create relationships between tasks that don't have DAGs yet. " - f"Set the DAG for at least one task and try again: {[self, *task_list]}" - ) - - if not self.has_dag(): - # If this task does not yet have a dag, add it to the same dag as the other task. 
- self.dag = dag - - for task in task_list: - if dag and not task.has_dag(): - # If the other task does not yet have a dag, add it to the same dag as this task and - dag.add_task(task) - if upstream: - task.downstream_task_ids.add(self.node_id) - self.upstream_task_ids.add(task.node_id) - if edge_modifier: - edge_modifier.add_edge_info(self.dag, task.node_id, self.node_id) - else: - self.downstream_task_ids.add(task.node_id) - task.upstream_task_ids.add(self.node_id) - if edge_modifier: - edge_modifier.add_edge_info(self.dag, self.node_id, task.node_id) - - def set_downstream( - self, - task_or_task_list: DependencyMixin | Sequence[DependencyMixin], - edge_modifier: EdgeModifier | None = None, - ) -> None: - """Set a node (or nodes) to be directly downstream from the current node.""" - self._set_relatives(task_or_task_list, upstream=False, edge_modifier=edge_modifier) - - def set_upstream( - self, - task_or_task_list: DependencyMixin | Sequence[DependencyMixin], - edge_modifier: EdgeModifier | None = None, - ) -> None: - """Set a node (or nodes) to be directly upstream from the current node.""" - self._set_relatives(task_or_task_list, upstream=True, edge_modifier=edge_modifier) - - @property - def downstream_list(self) -> Iterable[Operator]: - """List of nodes directly downstream.""" - if not self.dag: - raise AirflowException(f"Operator {self} has not been assigned to a DAG yet") - return [self.dag.get_task(tid) for tid in self.downstream_task_ids] - - @property - def upstream_list(self) -> Iterable[Operator]: - """List of nodes directly upstream.""" - if not self.dag: - raise AirflowException(f"Operator {self} has not been assigned to a DAG yet") - return [self.dag.get_task(tid) for tid in self.upstream_task_ids] - - def get_direct_relative_ids(self, upstream: bool = False) -> set[str]: - """Get set of the direct relative ids to the current task, upstream or downstream.""" - if upstream: - return self.upstream_task_ids - else: - return self.downstream_task_ids + from airflow.typing_compat import TypeAlias - def get_direct_relatives(self, upstream: bool = False) -> Iterable[DAGNode]: - """Get list of the direct relatives to the current task, upstream or downstream.""" - if upstream: - return self.upstream_list - else: - return self.downstream_list +import airflow.sdk.definitions.mixins +import airflow.sdk.definitions.node - def serialize_for_task_group(self) -> tuple[DagAttributeTypes, Any]: - """Serialize a task group's content; used by TaskGroupSerialization.""" - raise NotImplementedError() +DependencyMixin: TypeAlias = airflow.sdk.definitions.mixins.DependencyMixin +DAGNode: TypeAlias = airflow.sdk.definitions.node.DAGNode diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py index 83ff4f25c6376..c28af6acbe5ae 100644 --- a/airflow/models/xcom_arg.py +++ b/airflow/models/xcom_arg.py @@ -29,22 +29,21 @@ from airflow.models import MappedOperator, TaskInstance from airflow.models.abstractoperator import AbstractOperator from airflow.models.taskmixin import DependencyMixin +from airflow.sdk.types import NOTSET, ArgNotSet from airflow.utils.db import exists_query from airflow.utils.mixins import ResolveMixin from airflow.utils.session import NEW_SESSION, provide_session from airflow.utils.setup_teardown import SetupTeardownContext from airflow.utils.state import State from airflow.utils.trigger_rule import TriggerRule -from airflow.utils.types import NOTSET, ArgNotSet from airflow.utils.xcom import XCOM_RETURN_KEY if TYPE_CHECKING: from sqlalchemy.orm import Session - from 
airflow.models.baseoperator import BaseOperator - from airflow.models.dag import DAG + # from airflow.models.dag import DAG from airflow.models.operator import Operator - from airflow.models.taskmixin import DAGNode + from airflow.sdk import DAG, BaseOperator from airflow.utils.context import Context from airflow.utils.edgemodifier import EdgeModifier @@ -122,7 +121,7 @@ def iter_xcom_references(arg: Any) -> Iterator[tuple[Operator, str]]: yield from XComArg.iter_xcom_references(getattr(arg, attr)) @staticmethod - def apply_upstream_relationship(op: Operator, arg: Any): + def apply_upstream_relationship(op: DependencyMixin, arg: Any): """ Set dependency for XComArgs. @@ -134,12 +133,12 @@ def apply_upstream_relationship(op: Operator, arg: Any): op.set_upstream(operator) @property - def roots(self) -> list[DAGNode]: + def roots(self) -> list[Operator]: """Required by DependencyMixin.""" return [op for op, _ in self.iter_references()] @property - def leaves(self) -> list[DAGNode]: + def leaves(self) -> list[Operator]: """Required by DependencyMixin.""" return [op for op, _ in self.iter_references()] @@ -394,15 +393,15 @@ def as_setup(self) -> DependencyMixin: def as_teardown( self, *, - setups: BaseOperator | Iterable[BaseOperator] | ArgNotSet = NOTSET, - on_failure_fail_dagrun=NOTSET, + setups: BaseOperator | Iterable[BaseOperator] | None = None, + on_failure_fail_dagrun: bool | None = None, ): for operator, _ in self.iter_references(): operator.is_teardown = True operator.trigger_rule = TriggerRule.ALL_DONE_SETUP_SUCCESS - if on_failure_fail_dagrun is not NOTSET: + if on_failure_fail_dagrun is not None: operator.on_failure_fail_dagrun = on_failure_fail_dagrun - if not isinstance(setups, ArgNotSet): + if setups is not None: setups = [setups] if isinstance(setups, DependencyMixin) else setups for s in setups: s.is_setup = True diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index 2ec1388d16361..881e07a81ed67 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -50,7 +50,6 @@ from types import ModuleType from typing import Generator - from airflow.hooks.base import BaseHook from airflow.listeners.listener import ListenerManager from airflow.timetables.base import Timetable @@ -62,9 +61,7 @@ loaded_plugins: set[str] = set() # Plugin components to integrate as modules -registered_hooks: list[BaseHook] | None = None macros_modules: list[Any] | None = None -executors_modules: list[Any] | None = None # Plugin components to integrate directly admin_views: list[Any] | None = None @@ -87,8 +84,6 @@ during deserialization """ PLUGINS_ATTRIBUTES_TO_DUMP = { - "hooks", - "executors", "macros", "admin_views", "flask_blueprints", @@ -153,8 +148,6 @@ class AirflowPlugin: name: str | None = None source: AirflowPluginSource | None = None - hooks: list[Any] = [] - executors: list[Any] = [] macros: list[Any] = [] admin_views: list[Any] = [] flask_blueprints: list[Any] = [] @@ -348,7 +341,7 @@ def ensure_plugins_loaded(): """ from airflow.stats import Stats - global plugins, registered_hooks + global plugins if plugins is not None: log.debug("Plugins are already loaded. 
Skipping.") @@ -361,7 +354,6 @@ def ensure_plugins_loaded(): with Stats.timer() as timer: plugins = [] - registered_hooks = [] load_plugins_from_plugin_directory() load_entrypoint_plugins() @@ -369,11 +361,6 @@ def ensure_plugins_loaded(): if not settings.LAZY_LOAD_PROVIDERS: load_providers_plugins() - # We don't do anything with these for now, but we want to keep track of - # them so we can integrate them in to the UI's Connection screens - for plugin in plugins: - registered_hooks.extend(plugin.hooks) - if plugins: log.debug("Loading %d plugin(s) took %.2f seconds", len(plugins), timer.duration) @@ -533,33 +520,6 @@ def initialize_hook_lineage_readers_plugins(): hook_lineage_reader_classes.extend(plugin.hook_lineage_readers) -def integrate_executor_plugins() -> None: - """Integrate executor plugins to the context.""" - global plugins - global executors_modules - - if executors_modules is not None: - return - - ensure_plugins_loaded() - - if plugins is None: - raise AirflowPluginException("Can't load plugins.") - - log.debug("Integrate executor plugins") - - executors_modules = [] - for plugin in plugins: - if plugin.name is None: - raise AirflowPluginException("Invalid plugin name") - plugin_name: str = plugin.name - - executors_module = make_module("airflow.executors." + plugin_name, plugin.executors) - if executors_module: - executors_modules.append(executors_module) - sys.modules[executors_module.__name__] = executors_module - - def integrate_macros_plugins() -> None: """Integrates macro plugins.""" global plugins @@ -615,7 +575,6 @@ def get_plugin_info(attrs_to_dump: Iterable[str] | None = None) -> list[dict[str :param attrs_to_dump: A list of plugin attributes to dump """ ensure_plugins_loaded() - integrate_executor_plugins() integrate_macros_plugins() initialize_web_ui_plugins() initialize_fastapi_plugins() @@ -629,7 +588,7 @@ def get_plugin_info(attrs_to_dump: Iterable[str] | None = None) -> list[dict[str for attr in attrs_to_dump: if attr in ("global_operator_extra_links", "operator_extra_links"): info[attr] = [f"<{qualname(d.__class__)} object>" for d in getattr(plugin, attr)] - elif attr in ("macros", "timetables", "hooks", "executors", "priority_weight_strategies"): + elif attr in ("macros", "timetables", "priority_weight_strategies"): info[attr] = [qualname(d) for d in getattr(plugin, attr)] elif attr == "listeners": # listeners may be modules or class instances diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json index 35e266c310ac3..9e7edc449a698 100644 --- a/airflow/provider.yaml.schema.json +++ b/airflow/provider.yaml.schema.json @@ -215,11 +215,11 @@ }, "factory": { "type": ["string", "null"], - "description": "Dataset factory for specified URI. Creates AIP-60 compliant Dataset." + "description": "Asset factory for specified URI. Creates AIP-60 compliant Asset." }, "to_openlineage_converter": { "type": ["string", "null"], - "description": "OpenLineage converter function for specified URI schemes. Import path to a callable accepting a Dataset and LineageContext and returning OpenLineage dataset." + "description": "OpenLineage converter function for specified URI schemes. Import path to a callable accepting an Asset and LineageContext and returning OpenLineage dataset." } } } @@ -463,7 +463,6 @@ "source-date-epoch": { "type": "integer", "description": "Source date epoch - seconds since epoch (gmtime) when the release documentation was prepared. 
Used to generate reproducible package builds with flint.", - "minimum": 0, "default": 0, "examples": [ diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 573d256d6e59a..3c9f225f8e930 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -25,11 +25,11 @@ import json import logging import os -import sys import traceback import warnings from dataclasses import dataclass from functools import wraps +from importlib.resources import files as resource_files from time import perf_counter from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, TypeVar @@ -47,10 +47,6 @@ log = logging.getLogger(__name__) -if sys.version_info >= (3, 9): - from importlib.resources import files as resource_files -else: - from importlib_resources import files as resource_files PS = ParamSpec("PS") RT = TypeVar("RT") diff --git a/airflow/reproducible_build.yaml b/airflow/reproducible_build.yaml index bbf930c9d5fa2..e02435f29f060 100644 --- a/airflow/reproducible_build.yaml +++ b/airflow/reproducible_build.yaml @@ -1,2 +1,2 @@ -release-notes-hash: 48b5906017b111b01aeb940a1ec904bd -source-date-epoch: 1728409953 +release-notes-hash: c1049adaf66bd2d1cc6550b406594d24 +source-date-epoch: 1729642591 diff --git a/airflow/sensors/external_task.py b/airflow/sensors/external_task.py index 8eb501e281dda..331e17168bab7 100644 --- a/airflow/sensors/external_task.py +++ b/airflow/sensors/external_task.py @@ -20,7 +20,7 @@ import datetime import os import warnings -from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Collection, Iterable from airflow.configuration import conf from airflow.exceptions import AirflowException, AirflowSkipException @@ -476,7 +476,7 @@ class ExternalTaskMarker(EmptyOperator): operator_extra_links = [ExternalDagLink()] # The _serialized_fields are lazily loaded when get_serialized_fields() method is called - __serialized_fields: frozenset[str] | None = None + __serialized_fields: ClassVar[frozenset[str] | None] = None def __init__( self, diff --git a/airflow/serialization/pydantic/asset.py b/airflow/serialization/pydantic/asset.py index 4cd264902091a..611730dd92e47 100644 --- a/airflow/serialization/pydantic/asset.py +++ b/airflow/serialization/pydantic/asset.py @@ -23,7 +23,7 @@ class DagScheduleAssetReferencePydantic(BaseModelPydantic): """Serializable version of the DagScheduleAssetReference ORM SqlAlchemyModel used by internal API.""" - dataset_id: int + asset_id: int dag_id: str created_at: datetime updated_at: datetime @@ -34,7 +34,7 @@ class DagScheduleAssetReferencePydantic(BaseModelPydantic): class TaskOutletAssetReferencePydantic(BaseModelPydantic): """Serializable version of the TaskOutletAssetReference ORM SqlAlchemyModel used by internal API.""" - dataset_id: int + asset_id: int dag_id: str task_id: str created_at: datetime @@ -62,13 +62,13 @@ class AssetEventPydantic(BaseModelPydantic): """Serializable representation of the AssetEvent ORM SqlAlchemyModel used by internal API.""" id: int - dataset_id: Optional[int] + asset_id: Optional[int] extra: dict source_task_id: Optional[str] source_dag_id: Optional[str] source_run_id: Optional[str] source_map_index: Optional[int] timestamp: datetime - dataset: Optional[AssetPydantic] + asset: Optional[AssetPydantic] model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) diff --git a/airflow/serialization/pydantic/dag.py b/airflow/serialization/pydantic/dag.py index 
f7cb90797d5c8..4e37a633da058 100644 --- a/airflow/serialization/pydantic/dag.py +++ b/airflow/serialization/pydantic/dag.py @@ -82,7 +82,6 @@ class DagModelPydantic(BaseModelPydantic): last_parsed_time: Optional[datetime] last_pickled: Optional[datetime] last_expired: Optional[datetime] - scheduler_lock: Optional[bool] pickle_id: Optional[int] fileloc: str processor_subdir: Optional[str] diff --git a/airflow/serialization/pydantic/dag_run.py b/airflow/serialization/pydantic/dag_run.py index 86857452e8310..fd12ca12c0184 100644 --- a/airflow/serialization/pydantic/dag_run.py +++ b/airflow/serialization/pydantic/dag_run.py @@ -55,7 +55,7 @@ class DagRunPydantic(BaseModelPydantic): dag_hash: Optional[str] updated_at: Optional[datetime] dag: Optional[PydanticDag] - consumed_dataset_events: List[AssetEventPydantic] # noqa: UP006 + consumed_asset_events: List[AssetEventPydantic] # noqa: UP006 log_template_id: Optional[int] triggered_by: Optional[DagRunTriggeredByType] diff --git a/airflow/serialization/pydantic/taskinstance.py b/airflow/serialization/pydantic/taskinstance.py index caf44bea4c673..bf121353ca80e 100644 --- a/airflow/serialization/pydantic/taskinstance.py +++ b/airflow/serialization/pydantic/taskinstance.py @@ -40,6 +40,7 @@ ) from airflow.serialization.pydantic.dag import DagModelPydantic from airflow.serialization.pydantic.dag_run import DagRunPydantic +from airflow.utils import timezone from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.xcom import XCOM_RETURN_KEY @@ -83,6 +84,7 @@ def validated_operator(x: dict[str, Any] | Operator, _info: ValidationInfo) -> A class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): """Serializable representation of the TaskInstance ORM SqlAlchemyModel used by internal API.""" + id: str task_id: str dag_id: str run_id: str @@ -96,7 +98,6 @@ class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): max_tries: int hostname: str unixname: str - job_id: Optional[int] pool: str pool_slots: int queue: str @@ -105,6 +106,7 @@ class TaskInstancePydantic(BaseModelPydantic, LoggingMixin): custom_operator_name: Optional[str] queued_dttm: Optional[datetime] queued_by_job_id: Optional[int] + last_heartbeat_at: Optional[datetime] = None pid: Optional[int] executor: Optional[str] executor_config: Any @@ -138,7 +140,6 @@ def _run_raw_task( self, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, raise_on_defer: bool = False, session: Session | None = None, @@ -147,7 +148,6 @@ def _run_raw_task( ti=self, mark_success=mark_success, test_mode=test_mode, - job_id=job_id, pool=pool, raise_on_defer=raise_on_defer, session=session, @@ -252,6 +252,12 @@ def refresh_from_db(self, session: Session | None = None, lock_for_update: bool _refresh_from_db(task_instance=self, session=session, lock_for_update=lock_for_update) + def update_heartbeat(self): + """Update the recorded heartbeat for this task to "now".""" + from airflow.models.taskinstance import _update_ti_heartbeat + + return _update_ti_heartbeat(self.id, timezone.utcnow()) + def set_duration(self) -> None: """Set task instance duration.""" from airflow.models.taskinstance import _set_duration @@ -441,7 +447,6 @@ def check_and_change_state_before_execution( ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, - job_id: str | None = None, pool: str | None = None, external_executor_id: str | None = None, session: Session | None = None, @@ -457,7 +462,6 @@ def 
check_and_change_state_before_execution( mark_success=mark_success, test_mode=test_mode, hostname=get_hostname(), - job_id=job_id, pool=pool, external_executor_id=external_executor_id, session=session, @@ -484,7 +488,6 @@ def command_as_list( local: bool = False, pickle_id: int | None = None, raw: bool = False, - job_id: str | None = None, pool: str | None = None, cfg_path: str | None = None, ) -> list[str]: @@ -504,7 +507,6 @@ def command_as_list( local=local, pickle_id=pickle_id, raw=raw, - job_id=job_id, pool=pool, cfg_path=cfg_path, ) diff --git a/airflow/serialization/schema.json b/airflow/serialization/schema.json index 63cdf67b7d702..32ccd3dfff9c1 100644 --- a/airflow/serialization/schema.json +++ b/airflow/serialization/schema.json @@ -53,7 +53,7 @@ { "type": "integer" } ] }, - "dataset": { + "asset": { "type": "object", "properties": { "uri": { "type": "string" }, @@ -66,14 +66,14 @@ }, "required": [ "uri", "extra" ] }, - "typed_dataset": { + "typed_asset": { "type": "object", "properties": { "__type": { "type": "string", - "constant": "dataset" + "constant": "asset" }, - "__var": { "$ref": "#/definitions/dataset" } + "__var": { "$ref": "#/definitions/asset" } }, "required": [ "__type", @@ -81,17 +81,17 @@ ], "additionalProperties": false }, - "typed_dataset_cond": { + "typed_asset_cond": { "type": "object", "properties": { "__type": { "anyOf": [{ "type": "string", - "constant": "dataset_or" + "constant": "asset_or" }, { "type": "string", - "constant": "dataset_and" + "constant": "asset_and" } ] }, @@ -99,8 +99,8 @@ "type": "array", "items": { "anyOf": [ - {"$ref": "#/definitions/typed_dataset"}, - { "$ref": "#/definitions/typed_dataset_cond"} + {"$ref": "#/definitions/typed_asset"}, + { "$ref": "#/definitions/typed_asset_cond"} ] } } @@ -137,7 +137,7 @@ "type": "object", "properties": { "params": { "$ref": "#/definitions/params" }, - "_dag_id": { "type": "string" }, + "dag_id": { "type": "string" }, "tasks": { "$ref": "#/definitions/tasks" }, "timezone": { "$ref": "#/definitions/timezone" }, "owner_links": { "type": "object" }, @@ -156,11 +156,10 @@ {"type": "string"} ] }, - "orientation": { "type" : "string"}, - "_dag_display_property_value": { "type" : "string"}, - "_description": { "type" : "string"}, + "dag_display_name": { "type" : "string"}, + "description": { "type" : "string"}, "_concurrency": { "type" : "number"}, - "_max_active_tasks": { "type" : "number"}, + "max_active_tasks": { "type" : "number"}, "max_active_runs": { "type" : "number"}, "max_consecutive_failed_dag_runs": { "type" : "number"}, "default_args": { "$ref": "#/definitions/dict" }, @@ -168,14 +167,13 @@ "end_date": { "$ref": "#/definitions/datetime" }, "dagrun_timeout": { "$ref": "#/definitions/timedelta" }, "doc_md": { "type" : "string"}, - "_default_view": { "type" : "string"}, - "_access_control": {"$ref": "#/definitions/dict" }, + "access_control": {"$ref": "#/definitions/dict" }, "is_paused_upon_creation": { "type": "boolean" }, "has_on_success_callback": { "type": "boolean" }, "has_on_failure_callback": { "type": "boolean" }, "render_template_as_native_obj": { "type": "boolean" }, "tags": { "type": "array" }, - "_task_group": {"anyOf": [ + "task_group": {"anyOf": [ { "type": "null" }, { "$ref": "#/definitions/task_group" } ]}, @@ -183,7 +181,7 @@ "dag_dependencies": { "$ref": "#/definitions/dag_dependencies" } }, "required": [ - "_dag_id", + "dag_id", "fileloc", "tasks" ], @@ -219,7 +217,7 @@ "$comment": "A task/operator in a DAG", "type": "object", "required": [ - "_task_type", + "task_type", 
"_task_module", "task_id", "ui_color", @@ -227,7 +225,7 @@ "template_fields" ], "properties": { - "_task_type": { "type": "string" }, + "task_type": { "type": "string" }, "_task_module": { "type": "string" }, "_operator_extra_links": { "$ref": "#/definitions/extra_links" }, "task_id": { "type": "string" }, @@ -261,7 +259,6 @@ "type": "array", "items": { "type": "string" } }, - "subdag": { "$ref": "#/definitions/dag" }, "downstream_task_ids": { "type": "array", "items": { "type": "string" } diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 9f180c2a5deac..79403860f5fac 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -21,7 +21,7 @@ import collections.abc import datetime import enum -import inspect +import itertools import logging import weakref from functools import cache @@ -59,6 +59,7 @@ from airflow.models.tasklog import LogTemplate from airflow.models.xcom_arg import XComArg, deserialize_xcom_arg, serialize_xcom_arg from airflow.providers_manager import ProvidersManager +from airflow.sdk import BaseOperator as TaskSDKBaseOperator from airflow.serialization.dag_dependency import DagDependency from airflow.serialization.enums import DagAttributeTypes as DAT, Encoding from airflow.serialization.helpers import serialize_template_field @@ -101,7 +102,7 @@ from airflow.models.baseoperatorlink import BaseOperatorLink from airflow.models.expandinput import ExpandInput from airflow.models.operator import Operator - from airflow.models.taskmixin import DAGNode + from airflow.sdk.definitions.node import DAGNode from airflow.serialization.json_schema import Validator from airflow.ti_deps.deps.base_ti_dep import BaseTIDep from airflow.timetables.base import Timetable @@ -265,7 +266,7 @@ def encode_asset_condition(var: BaseAsset) -> dict[str, Any]: def decode_asset_condition(var: dict[str, Any]) -> BaseAsset: """ - Decode a previously serialized dataset condition. + Decode a previously serialized asset condition. :meta private: """ @@ -597,7 +598,7 @@ def serialize_to_json( if key == "_operator_name": # when operator_name matches task_type, we can remove # it to reduce the JSON payload - task_type = getattr(object_to_serialize, "_task_type", None) + task_type = getattr(object_to_serialize, "task_type", None) if value != task_type: serialized_object[key] = cls.serialize(value) elif key in decorated_fields: @@ -740,8 +741,8 @@ def serialize( elif isinstance(var, LazySelectSequence): return cls.serialize(list(var)) elif isinstance(var, BaseAsset): - serialized_dataset = encode_asset_condition(var) - return cls._encode(serialized_dataset, type_=serialized_dataset.pop("__type")) + serialized_asset = encode_asset_condition(var) + return cls._encode(serialized_asset, type_=serialized_asset.pop("__type")) elif isinstance(var, SimpleTaskInstance): return cls._encode( cls.serialize(var.__dict__, strict=strict, use_pydantic_models=use_pydantic_models), @@ -920,10 +921,11 @@ def _value_is_hardcoded_default(cls, attrname: str, value: Any, instance: Any) - to account for the case where the default value of the field is None but has the ``field = field or {}`` set. 
""" - if attrname in cls._CONSTRUCTOR_PARAMS and ( - cls._CONSTRUCTOR_PARAMS[attrname] is value or (value in [{}, []]) - ): - return True + if attrname in cls._CONSTRUCTOR_PARAMS: + if cls._CONSTRUCTOR_PARAMS[attrname] is value or (value in [{}, []]): + return True + if cls._CONSTRUCTOR_PARAMS[attrname] is attrs.NOTHING and value is None: + return True return False @classmethod @@ -1079,7 +1081,10 @@ class SerializedBaseOperator(BaseOperator, BaseSerialization): _CONSTRUCTOR_PARAMS = { k: v.default - for k, v in signature(BaseOperator.__init__).parameters.items() + for k, v in itertools.chain( + signature(BaseOperator.__init__).parameters.items(), + signature(TaskSDKBaseOperator.__init__).parameters.items(), + ) if v.default is not v.empty } @@ -1151,9 +1156,9 @@ def _serialize_node(cls, op: BaseOperator | MappedOperator, include_deps: bool) """Serialize operator into a JSON object.""" serialize_op = cls.serialize_to_json(op, cls._decorated_fields) - serialize_op["_task_type"] = getattr(op, "_task_type", type(op).__name__) + serialize_op["task_type"] = getattr(op, "task_type", type(op).__name__) serialize_op["_task_module"] = getattr(op, "_task_module", type(op).__module__) - if op.operator_name != serialize_op["_task_type"]: + if op.operator_name != serialize_op["task_type"]: serialize_op["_operator_name"] = op.operator_name # Used to determine if an Operator is inherited from EmptyOperator @@ -1177,7 +1182,7 @@ def _serialize_node(cls, op: BaseOperator | MappedOperator, include_deps: bool) # Store all template_fields as they are if there are JSON Serializable # If not, store them as strings # And raise an exception if the field is not templateable - forbidden_fields = set(inspect.signature(BaseOperator.__init__).parameters.keys()) + forbidden_fields = set(SerializedBaseOperator._CONSTRUCTOR_PARAMS.keys()) # Though allow some of the BaseOperator fields to be templated anyway forbidden_fields.difference_update({"email"}) if op.template_fields: @@ -1242,7 +1247,7 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: op_extra_links_from_plugin = {} if "_operator_name" not in encoded_op: - encoded_op["_operator_name"] = encoded_op["_task_type"] + encoded_op["_operator_name"] = encoded_op["task_type"] # We don't want to load Extra Operator links in Scheduler if cls._load_operator_extra_links: @@ -1256,7 +1261,7 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: for ope in plugins_manager.operator_extra_links: for operator in ope.operators: if ( - operator.__name__ == encoded_op["_task_type"] + operator.__name__ == encoded_op["task_type"] and operator.__module__ == encoded_op["_task_module"] ): op_extra_links_from_plugin.update({ope.name: ope}) @@ -1272,6 +1277,8 @@ def populate_operator(cls, op: Operator, encoded_op: dict[str, Any]) -> None: if k in ("_outlets", "_inlets"): # `_outlets` -> `outlets` k = k[1:] + elif k == "task_type": + k = "_task_type" if k == "_downstream_task_ids": # Upgrade from old format/name k = "downstream_task_ids" @@ -1383,7 +1390,7 @@ def deserialize_operator(cls, encoded_op: dict[str, Any]) -> Operator: try: operator_name = encoded_op["_operator_name"] except KeyError: - operator_name = encoded_op["_task_type"] + operator_name = encoded_op["task_type"] op = MappedOperator( operator_class=op_data, @@ -1400,7 +1407,7 @@ def deserialize_operator(cls, encoded_op: dict[str, Any]) -> Operator: ui_fgcolor=BaseOperator.ui_fgcolor, is_empty=False, task_module=encoded_op["_task_module"], - 
task_type=encoded_op["_task_type"], + task_type=encoded_op["task_type"], operator_name=operator_name, dag=None, task_group=None, @@ -1576,16 +1583,13 @@ class SerializedDAG(DAG, BaseSerialization): not pickle-able. SerializedDAG works for all DAGs. """ - _decorated_fields = {"default_args", "_access_control"} + _decorated_fields = {"default_args", "access_control"} @staticmethod def __get_constructor_defaults(): param_to_attr = { - "max_active_tasks": "_max_active_tasks", - "dag_display_name": "_dag_display_property_value", "description": "_description", "default_view": "_default_view", - "access_control": "_access_control", } return { param_to_attr.get(k, k): v.default @@ -1613,7 +1617,7 @@ def serialize_dag(cls, dag: DAG) -> dict: ] dag_deps.extend(DependencyDetector.detect_dag_dependencies(dag)) serialized_dag["dag_dependencies"] = [x.__dict__ for x in sorted(dag_deps)] - serialized_dag["_task_group"] = TaskGroupSerialization.serialize_task_group(dag.task_group) + serialized_dag["task_group"] = TaskGroupSerialization.serialize_task_group(dag.task_group) # Edge info in the JSON exactly matches our internal structure serialized_dag["edge_info"] = dag.edge_info @@ -1633,7 +1637,7 @@ def serialize_dag(cls, dag: DAG) -> dict: @classmethod def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG: """Deserializes a DAG from a JSON object.""" - dag = SerializedDAG(dag_id=encoded_dag["_dag_id"], schedule=None) + dag = SerializedDAG(dag_id=encoded_dag["dag_id"], schedule=None) for k, v in encoded_dag.items(): if k == "_downstream_task_ids": @@ -1668,20 +1672,21 @@ def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG: v = set(v) # else use v as it is - setattr(dag, k, v) + object.__setattr__(dag, k, v) # Set _task_group - if "_task_group" in encoded_dag: - dag._task_group = TaskGroupSerialization.deserialize_task_group( - encoded_dag["_task_group"], + if "task_group" in encoded_dag: + tg = TaskGroupSerialization.deserialize_task_group( + encoded_dag["task_group"], None, dag.task_dict, dag, ) + object.__setattr__(dag, "task_group", tg) else: # This must be old data that had no task_group. Create a root TaskGroup and add # all tasks to it. - dag._task_group = TaskGroup.create_root(dag) + object.__setattr__(dag, "task_group", TaskGroup.create_root(dag)) for task in dag.tasks: dag.task_group.add(task) @@ -1704,8 +1709,10 @@ def deserialize_dag(cls, encoded_dag: dict[str, Any]) -> SerializedDAG: def _is_excluded(cls, var: Any, attrname: str, op: DAGNode): # {} is explicitly different from None in the case of DAG-level access control # and as a result we need to preserve empty dicts through serialization for this field - if attrname == "_access_control" and var is not None: + if attrname == "access_control" and var is not None: return False + if attrname == "dag_display_name" and var == op.dag_id: + return True return super()._is_excluded(var, attrname, op) @classmethod diff --git a/airflow/settings.py b/airflow/settings.py index a6adbbcf9ff77..89c77a2abfe31 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -617,7 +617,15 @@ def configure_adapters(): if SQL_ALCHEMY_CONN.startswith("mysql"): try: - import MySQLdb.converters + try: + import MySQLdb.converters + except ImportError: + raise RuntimeError( + "You do not have `mysqlclient` package installed. 
" + "Please install it with `pip install mysqlclient` and make sure you have system " + "mysql libraries installed, as well as well as `pkg-config` system package " + "installed in case you see compilation error during installation." + ) MySQLdb.converters.conversions[Pendulum] = MySQLdb.converters.DateTime2literal except ImportError: @@ -741,6 +749,9 @@ def initialize(): configure_orm() configure_action_logging() + # mask the sensitive_config_values + conf.mask_secrets() + # Run any custom runtime checks that needs to be executed for providers run_providers_custom_runtime_checks() diff --git a/airflow/task/priority_strategy.py b/airflow/task/priority_strategy.py index c22bdfa9940c6..dcef1c865b6e4 100644 --- a/airflow/task/priority_strategy.py +++ b/airflow/task/priority_strategy.py @@ -22,8 +22,6 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Any -from airflow.exceptions import AirflowException - if TYPE_CHECKING: from airflow.models.taskinstance import TaskInstance @@ -150,5 +148,5 @@ def validate_and_load_priority_weight_strategy( priority_weight_strategy_class = qualname(priority_weight_strategy) loaded_priority_weight_strategy = _get_registered_priority_weight_strategy(priority_weight_strategy_class) if loaded_priority_weight_strategy is None: - raise AirflowException(f"Unknown priority strategy {priority_weight_strategy_class}") + raise ValueError(f"Unknown priority strategy {priority_weight_strategy_class}") return loaded_priority_weight_strategy() diff --git a/airflow/task/task_runner/standard_task_runner.py b/airflow/task/standard_task_runner.py similarity index 59% rename from airflow/task/task_runner/standard_task_runner.py rename to airflow/task/standard_task_runner.py index 6a4351e17a5b5..a5641002c961c 100644 --- a/airflow/task/task_runner/standard_task_runner.py +++ b/airflow/task/standard_task_runner.py @@ -22,6 +22,7 @@ import logging import os import signal +import subprocess import threading import time from typing import TYPE_CHECKING @@ -29,22 +30,81 @@ import psutil from setproctitle import setproctitle +from airflow.configuration import conf +from airflow.exceptions import AirflowConfigException from airflow.models.taskinstance import TaskReturnCode from airflow.settings import CAN_FORK from airflow.stats import Stats -from airflow.task.task_runner.base_task_runner import BaseTaskRunner +from airflow.utils.configuration import tmp_configuration_copy from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager +from airflow.utils.log.logging_mixin import LoggingMixin +from airflow.utils.net import get_hostname +from airflow.utils.platform import IS_WINDOWS, getuser from airflow.utils.process_utils import reap_process_group, set_new_process_group if TYPE_CHECKING: from airflow.jobs.local_task_job_runner import LocalTaskJobRunner -class StandardTaskRunner(BaseTaskRunner): - """Standard runner for all tasks.""" +class StandardTaskRunner(LoggingMixin): + """ + Runs Airflow task instances via CLI. + + Invoke the `airflow tasks run` command with raw mode enabled in a subprocess. 
+ + :param job_runner: The LocalTaskJobRunner associated with the task runner + """ def __init__(self, job_runner: LocalTaskJobRunner): - super().__init__(job_runner=job_runner) + self.job_runner = job_runner + super().__init__(job_runner.task_instance) + self._task_instance = job_runner.task_instance + + popen_prepend = [] + if self._task_instance.run_as_user: + self.run_as_user: str | None = self._task_instance.run_as_user + else: + try: + self.run_as_user = conf.get("core", "default_impersonation") + except AirflowConfigException: + self.run_as_user = None + + # Add sudo commands to change user if we need to. Needed to handle SubDagOperator + # case using a SequentialExecutor. + self.log.debug("Planning to run as the %s user", self.run_as_user) + if self.run_as_user and (self.run_as_user != getuser()): + # We want to include any environment variables now, as we won't + # want to have to specify them in the sudo call - they would show + # up in `ps` that way! And run commands now, as the other user + # might not be able to run the cmds to get credentials + cfg_path = tmp_configuration_copy(chmod=0o600, include_env=True, include_cmds=True) + + # Give ownership of file to user; only they can read and write + subprocess.check_call(["sudo", "chown", self.run_as_user, cfg_path], close_fds=True) + + # propagate PYTHONPATH environment variable + pythonpath_value = os.environ.get("PYTHONPATH", "") + popen_prepend = ["sudo", "-E", "-H", "-u", self.run_as_user] + + if pythonpath_value: + popen_prepend.append(f"PYTHONPATH={pythonpath_value}") + + else: + # Always provide a copy of the configuration file settings. Since + # we are running as the same user, and can pass through environment + # variables then we don't need to include those in the config copy + # - the runner can read/execute those values as it needs + cfg_path = tmp_configuration_copy(chmod=0o600, include_env=False, include_cmds=False) + + self._cfg_path = cfg_path + self._command = popen_prepend + self._task_instance.command_as_list( + raw=True, + pickle_id=self.job_runner.pickle_id, + mark_success=self.job_runner.mark_success, + pool=self.job_runner.pool, + cfg_path=cfg_path, + ) + self.process = None self._rc = None if TYPE_CHECKING: assert self._task_instance.task @@ -98,15 +158,10 @@ def _start_by_fork(self): # [1:] - remove "airflow" from the start of the command args = parser.parse_args(self._command[1:]) - # We prefer the job_id passed on the command-line because at this time, the - # task instance may not have been updated. - job_id = getattr(args, "job_id", self._task_instance.job_id) self.log.info("Running: %s", self._command) - self.log.info("Job %s: Subtask %s", job_id, self._task_instance.task_id) + self.log.info("Subtask %s", self._task_instance.task_id) proc_title = "airflow task runner: {0.dag_id} {0.task_id} {0.execution_date_or_run_id}" - if job_id is not None: - proc_title += " {0.job_id}" setproctitle(proc_title.format(args)) return_code = 0 try: @@ -118,15 +173,11 @@ def _start_by_fork(self): return_code = 0 if isinstance(ret, TaskReturnCode): return_code = ret.value - except Exception as exc: + except Exception: return_code = 1 self.log.exception( - "Failed to execute job %s for task %s (%s; %r)", - job_id, - self._task_instance.task_id, - exc, - os.getpid(), + "Failed to execute task_id=%s pid=%r", self._task_instance.task_id, os.getpid() ) except SystemExit as sys_ex: # Someone called sys.exit() in the fork - mistakenly. 
You should not run sys.exit() in @@ -189,10 +240,10 @@ def terminate(self): if self._rc == -signal.SIGKILL: self.log.error( ( - "Job %s was killed before it finished (likely due to running out of memory)", + "TI %s was killed before it finished (likely due to running out of memory)", "For more information, see https://airflow.apache.org/docs/apache-airflow/stable/troubleshooting.html#LocalTaskJob-killed", ), - self._task_instance.job_id, + self._task_instance.id, ) def get_process_pid(self) -> int: @@ -216,3 +267,69 @@ def _read_task_utilization(self): except (psutil.NoSuchProcess, psutil.AccessDenied, AttributeError): self.log.info("Process not found (most likely exited), stop collecting metrics") return + + def _read_task_logs(self, stream): + while True: + line = stream.readline() + if isinstance(line, bytes): + line = line.decode("utf-8") + if not line: + break + self.log.info( + "Task %s %s", + self._task_instance.task_id, + line.rstrip("\n"), + ) + + def run_command(self, run_with=None) -> subprocess.Popen: + """ + Run the task command. + + :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` + :return: the process that was run + """ + run_with = run_with or [] + full_cmd = run_with + self._command + + self.log.info("Running on host: %s", get_hostname()) + self.log.info("Running: %s", full_cmd) + with _airflow_parsing_context_manager( + dag_id=self._task_instance.dag_id, + task_id=self._task_instance.task_id, + ): + if IS_WINDOWS: + proc = subprocess.Popen( + full_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + close_fds=True, + env=os.environ.copy(), + ) + else: + proc = subprocess.Popen( + full_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + close_fds=True, + env=os.environ.copy(), + preexec_fn=os.setsid, + ) + + # Start daemon thread to read subprocess logging output + log_reader = threading.Thread( + target=self._read_task_logs, + args=(proc.stdout,), + ) + log_reader.daemon = True + log_reader.start() + return proc + + def on_finish(self) -> None: + """Execute when this is done running.""" + if self._cfg_path and os.path.isfile(self._cfg_path): + if self.run_as_user: + subprocess.call(["sudo", "rm", self._cfg_path], close_fds=True) + else: + os.remove(self._cfg_path) diff --git a/airflow/task/task_runner/__init__.py b/airflow/task/task_runner/__init__.py deleted file mode 100644 index 11a35c177b434..0000000000000 --- a/airflow/task/task_runner/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from __future__ import annotations - -import logging -from typing import TYPE_CHECKING - -from airflow.configuration import conf -from airflow.exceptions import AirflowConfigException -from airflow.utils.module_loading import import_string - -if TYPE_CHECKING: - from airflow.jobs.local_task_job_runner import LocalTaskJobRunner - from airflow.task.task_runner.base_task_runner import BaseTaskRunner - -log = logging.getLogger(__name__) - -_TASK_RUNNER_NAME = conf.get("core", "TASK_RUNNER") - -STANDARD_TASK_RUNNER = "StandardTaskRunner" - -CGROUP_TASK_RUNNER = "CgroupTaskRunner" - -CORE_TASK_RUNNERS = { - STANDARD_TASK_RUNNER: "airflow.task.task_runner.standard_task_runner.StandardTaskRunner", - CGROUP_TASK_RUNNER: "airflow.task.task_runner.cgroup_task_runner.CgroupTaskRunner", -} - - -def get_task_runner(local_task_job_runner: LocalTaskJobRunner) -> BaseTaskRunner: - """ - Get the task runner that can be used to run with the given job runner. - - :param local_task_job_runner: The LocalTaskJobRunner associated with the TaskInstance - that needs to be executed. - :return: The task runner to use to run the task. - """ - if _TASK_RUNNER_NAME in CORE_TASK_RUNNERS: - log.debug("Loading core task runner: %s", _TASK_RUNNER_NAME) - task_runner_class = import_string(CORE_TASK_RUNNERS[_TASK_RUNNER_NAME]) - else: - log.debug("Loading task runner from custom path: %s", _TASK_RUNNER_NAME) - try: - task_runner_class = import_string(_TASK_RUNNER_NAME) - except ImportError: - raise AirflowConfigException( - f'The task runner could not be loaded. Please check "task_runner" key in "core" section. ' - f'Current value: "{_TASK_RUNNER_NAME}".' - ) - task_runner = task_runner_class(local_task_job_runner) - return task_runner diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py deleted file mode 100644 index 8a332fd4e3ba2..0000000000000 --- a/airflow/task/task_runner/base_task_runner.py +++ /dev/null @@ -1,202 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-"""Base task runner.""" - -from __future__ import annotations - -import os -import subprocess -import threading - -from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager -from airflow.utils.platform import IS_WINDOWS - -if not IS_WINDOWS: - # ignored to avoid flake complaining on Linux - from pwd import getpwnam # noqa: F401 - -from typing import TYPE_CHECKING - -from airflow.configuration import conf -from airflow.exceptions import AirflowConfigException -from airflow.utils.configuration import tmp_configuration_copy -from airflow.utils.log.logging_mixin import LoggingMixin -from airflow.utils.net import get_hostname -from airflow.utils.platform import getuser - -if TYPE_CHECKING: - from airflow.jobs.local_task_job_runner import LocalTaskJobRunner - -PYTHONPATH_VAR = "PYTHONPATH" - - -class BaseTaskRunner(LoggingMixin): - """ - Runs Airflow task instances via CLI. - - Invoke the `airflow tasks run` command with raw mode enabled in a subprocess. - - :param job_runner: The LocalTaskJobRunner associated with the task runner - """ - - def __init__(self, job_runner: LocalTaskJobRunner): - self.job_runner = job_runner - super().__init__(job_runner.task_instance) - self._task_instance = job_runner.task_instance - - popen_prepend = [] - if self._task_instance.run_as_user: - self.run_as_user: str | None = self._task_instance.run_as_user - else: - try: - self.run_as_user = conf.get("core", "default_impersonation") - except AirflowConfigException: - self.run_as_user = None - - # Add sudo commands to change user if we need to. Needed to handle SubDagOperator - # case using a SequentialExecutor. - self.log.debug("Planning to run as the %s user", self.run_as_user) - if self.run_as_user and (self.run_as_user != getuser()): - # We want to include any environment variables now, as we won't - # want to have to specify them in the sudo call - they would show - # up in `ps` that way! And run commands now, as the other user - # might not be able to run the cmds to get credentials - cfg_path = tmp_configuration_copy(chmod=0o600, include_env=True, include_cmds=True) - - # Give ownership of file to user; only they can read and write - subprocess.check_call(["sudo", "chown", self.run_as_user, cfg_path], close_fds=True) - - # propagate PYTHONPATH environment variable - pythonpath_value = os.environ.get(PYTHONPATH_VAR, "") - popen_prepend = ["sudo", "-E", "-H", "-u", self.run_as_user] - - if pythonpath_value: - popen_prepend.append(f"{PYTHONPATH_VAR}={pythonpath_value}") - - else: - # Always provide a copy of the configuration file settings. 
Since - # we are running as the same user, and can pass through environment - # variables then we don't need to include those in the config copy - # - the runner can read/execute those values as it needs - cfg_path = tmp_configuration_copy(chmod=0o600, include_env=False, include_cmds=False) - - self._cfg_path = cfg_path - self._command = popen_prepend + self._task_instance.command_as_list( - raw=True, - pickle_id=self.job_runner.pickle_id, - mark_success=self.job_runner.mark_success, - job_id=self.job_runner.job.id, - pool=self.job_runner.pool, - cfg_path=cfg_path, - ) - self.process = None - - def _read_task_logs(self, stream): - while True: - line = stream.readline() - if isinstance(line, bytes): - line = line.decode("utf-8") - if not line: - break - self.log.info( - "Job %s: Subtask %s %s", - self._task_instance.job_id, - self._task_instance.task_id, - line.rstrip("\n"), - ) - - def run_command(self, run_with=None) -> subprocess.Popen: - """ - Run the task command. - - :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` - :return: the process that was run - """ - run_with = run_with or [] - full_cmd = run_with + self._command - - self.log.info("Running on host: %s", get_hostname()) - self.log.info("Running: %s", full_cmd) - with _airflow_parsing_context_manager( - dag_id=self._task_instance.dag_id, - task_id=self._task_instance.task_id, - ): - if IS_WINDOWS: - proc = subprocess.Popen( - full_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - close_fds=True, - env=os.environ.copy(), - ) - else: - proc = subprocess.Popen( - full_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True, - close_fds=True, - env=os.environ.copy(), - preexec_fn=os.setsid, - ) - - # Start daemon thread to read subprocess logging output - log_reader = threading.Thread( - target=self._read_task_logs, - args=(proc.stdout,), - ) - log_reader.daemon = True - log_reader.start() - return proc - - def start(self): - """Start running the task instance in a subprocess.""" - raise NotImplementedError() - - def return_code(self, timeout: float = 0.0) -> int | None: - """ - Extract the return code. - - :return: The return code associated with running the task instance or - None if the task is not yet done. 
- """ - raise NotImplementedError() - - def terminate(self) -> None: - """Force kill the running task instance.""" - raise NotImplementedError() - - def on_finish(self) -> None: - """Execute when this is done running.""" - if self._cfg_path and os.path.isfile(self._cfg_path): - if self.run_as_user: - subprocess.call(["sudo", "rm", self._cfg_path], close_fds=True) - else: - os.remove(self._cfg_path) - - def get_process_pid(self) -> int: - """Get the process pid.""" - if hasattr(self, "process") and self.process is not None and hasattr(self.process, "pid"): - # this is a backwards compatibility for custom task runners that were used before - # the process.pid attribute was accessed by local_task_job directly but since process - # was either subprocess.Popen or psutil.Process it was not possible to have it really - # common in the base task runner - instead we changed it to use get_process_pid method and leave - # it to the task_runner to implement it - return self.process.pid - raise NotImplementedError() diff --git a/airflow/task/task_runner/cgroup_task_runner.py b/airflow/task/task_runner/cgroup_task_runner.py deleted file mode 100644 index 41715fdbc6d5b..0000000000000 --- a/airflow/task/task_runner/cgroup_task_runner.py +++ /dev/null @@ -1,246 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -"""Task runner for cgroup to run Airflow task.""" - -from __future__ import annotations - -import os -import uuid -from typing import TYPE_CHECKING - -import psutil -from cgroupspy import trees - -from airflow.task.task_runner.base_task_runner import BaseTaskRunner -from airflow.utils import timezone -from airflow.utils.operator_resources import Resources -from airflow.utils.platform import getuser -from airflow.utils.process_utils import reap_process_group - -if TYPE_CHECKING: - from airflow.jobs.local_task_job_runner import LocalTaskJobRunner - - -class CgroupTaskRunner(BaseTaskRunner): - """ - Runs the raw Airflow task in a cgroup container. - - With containment for memory and cpu. It uses the resource requirements - defined in the task to construct the settings for the cgroup. - - Cgroup must be mounted first otherwise CgroupTaskRunner - will not be able to work. - - cgroup-bin Ubuntu package must be installed to use cgexec command. - - Note that this task runner will only work if the Airflow user has root privileges, - e.g. if the airflow user is called `airflow` then the following entries (or an even - less restrictive ones) are needed in the sudoers file (replacing - /CGROUPS_FOLDER with your system's cgroups folder, e.g. 
'/sys/fs/cgroup/'): - airflow ALL= (root) NOEXEC: /bin/chown /CGROUPS_FOLDER/memory/airflow/* - airflow ALL= (root) NOEXEC: !/bin/chown /CGROUPS_FOLDER/memory/airflow/*..* - airflow ALL= (root) NOEXEC: !/bin/chown /CGROUPS_FOLDER/memory/airflow/* * - airflow ALL= (root) NOEXEC: /bin/chown /CGROUPS_FOLDER/cpu/airflow/* - airflow ALL= (root) NOEXEC: !/bin/chown /CGROUPS_FOLDER/cpu/airflow/*..* - airflow ALL= (root) NOEXEC: !/bin/chown /CGROUPS_FOLDER/cpu/airflow/* * - airflow ALL= (root) NOEXEC: /bin/chmod /CGROUPS_FOLDER/memory/airflow/* - airflow ALL= (root) NOEXEC: !/bin/chmod /CGROUPS_FOLDER/memory/airflow/*..* - airflow ALL= (root) NOEXEC: !/bin/chmod /CGROUPS_FOLDER/memory/airflow/* * - airflow ALL= (root) NOEXEC: /bin/chmod /CGROUPS_FOLDER/cpu/airflow/* - airflow ALL= (root) NOEXEC: !/bin/chmod /CGROUPS_FOLDER/cpu/airflow/*..* - airflow ALL= (root) NOEXEC: !/bin/chmod /CGROUPS_FOLDER/cpu/airflow/* * - """ - - def __init__(self, job_runner: LocalTaskJobRunner): - super().__init__(job_runner=job_runner) - self.process = None - self._finished_running = False - self._cpu_shares = None - self._mem_mb_limit = None - self.mem_cgroup_name = None - self.cpu_cgroup_name = None - self._created_cpu_cgroup = False - self._created_mem_cgroup = False - self._cur_user = getuser() - - def _create_cgroup(self, path) -> trees.Node: - """ - Create the specified cgroup. - - :param path: The path of the cgroup to create. - E.g. cpu/mygroup/mysubgroup - :return: the Node associated with the created cgroup. - """ - node = trees.Tree().root - path_split = path.split(os.sep) - for path_element in path_split: - # node.name is encoded to bytes: - # https://github.com/cloudsigma/cgroupspy/blob/e705ac4ccdfe33d8ecc700e9a35a9556084449ca/cgroupspy/nodes.py#L64 - name_to_node = {x.name.decode(): x for x in node.children} - if path_element not in name_to_node: - self.log.debug("Creating cgroup %s in %s", path_element, node.path.decode()) - node = node.create_cgroup(path_element) - else: - self.log.debug( - "Not creating cgroup %s in %s since it already exists", path_element, node.path.decode() - ) - node = name_to_node[path_element] - return node - - def _delete_cgroup(self, path): - """ - Delete the specified cgroup. - - :param path: The path of the cgroup to delete. - E.g. 
cpu/mygroup/mysubgroup - """ - node = trees.Tree().root - path_split = path.split("/") - for path_element in path_split: - name_to_node = {x.name.decode(): x for x in node.children} - if path_element not in name_to_node: - self.log.warning("Cgroup does not exist: %s", path) - return - else: - node = name_to_node[path_element] - # node is now the leaf node - parent = node.parent - self.log.debug("Deleting cgroup %s/%s", parent, node.name) - parent.delete_cgroup(node.name.decode()) - - def start(self): - # Use bash if it's already in a cgroup - cgroups = self._get_cgroup_names() - if (cgroups.get("cpu") and cgroups.get("cpu") != "/") or ( - cgroups.get("memory") and cgroups.get("memory") != "/" - ): - self.log.debug( - "Already running in a cgroup (cpu: %s memory: %s) so not creating another one", - cgroups.get("cpu"), - cgroups.get("memory"), - ) - self.process = self.run_command() - return - - # Create a unique cgroup name - cgroup_name = f"airflow/{timezone.utcnow():%Y-%m-%d}/{uuid.uuid4()}" - - self.mem_cgroup_name = f"memory/{cgroup_name}" - self.cpu_cgroup_name = f"cpu/{cgroup_name}" - - # Get the resource requirements from the task - task = self._task_instance.task - resources = task.resources if task.resources is not None else Resources() - cpus = resources.cpus.qty - self._cpu_shares = cpus * 1024 - self._mem_mb_limit = resources.ram.qty - - # Create the memory cgroup - self.mem_cgroup_node = self._create_cgroup(self.mem_cgroup_name) - self._created_mem_cgroup = True - if self._mem_mb_limit > 0: - self.log.debug("Setting %s with %s MB of memory", self.mem_cgroup_name, self._mem_mb_limit) - self.mem_cgroup_node.controller.limit_in_bytes = self._mem_mb_limit * 1024 * 1024 - - # Create the CPU cgroup - cpu_cgroup_node = self._create_cgroup(self.cpu_cgroup_name) - self._created_cpu_cgroup = True - if self._cpu_shares > 0: - self.log.debug("Setting %s with %s CPU shares", self.cpu_cgroup_name, self._cpu_shares) - cpu_cgroup_node.controller.shares = self._cpu_shares - - # Start the process w/ cgroups - self.log.debug("Starting task process with cgroups cpu,memory: %s", cgroup_name) - self.process = self.run_command(["cgexec", "-g", f"cpu,memory:{cgroup_name}"]) - - def return_code(self, timeout: float = 0) -> int | None: - if self.process is None: - return None - return_code = self.process.poll() - # TODO(plypaul) Monitoring the control file in the cgroup fs is better than - # checking the return code here. The PR to use this is here: - # https://github.com/plypaul/airflow/blob/e144e4d41996300ffa93947f136eab7785b114ed/airflow/contrib/task_runner/cgroup_task_runner.py#L43 - # but there were some issues installing the python butter package and - # libseccomp-dev on some hosts for some reason. - # I wasn't able to track down the root cause of the package install failures, but - # we might want to revisit that approach at some other point. - if return_code == 137: - self.log.error( - "Task failed with return code of 137. This may indicate " - "that it was killed due to excessive memory usage. 
" - "Please consider optimizing your task or using the " - "resources argument to reserve more memory for your task" - ) - return return_code - - def terminate(self): - if self.process and psutil.pid_exists(self.process.pid): - reap_process_group(self.process.pid, self.log) - - def _log_memory_usage(self, mem_cgroup_node): - def byte_to_gb(num_bytes, precision=2): - return round(num_bytes / (1024 * 1024 * 1024), precision) - - with open(mem_cgroup_node.full_path + "/memory.max_usage_in_bytes") as f: - max_usage_in_bytes = int(f.read().strip()) - - used_gb = byte_to_gb(max_usage_in_bytes) - limit_gb = byte_to_gb(mem_cgroup_node.controller.limit_in_bytes) - - self.log.info( - "Memory max usage of the task is %s GB, while the memory limit is %s GB", used_gb, limit_gb - ) - - if max_usage_in_bytes >= mem_cgroup_node.controller.limit_in_bytes: - self.log.info( - "This task has reached the memory limit allocated by Airflow worker. " - "If it failed, try to optimize the task or reserve more memory." - ) - - def on_finish(self): - # Let the OOM watcher thread know we're done to avoid false OOM alarms - self._finished_running = True - # Clean up the cgroups - if self._created_mem_cgroup: - self._log_memory_usage(self.mem_cgroup_node) - self._delete_cgroup(self.mem_cgroup_name) - if self._created_cpu_cgroup: - self._delete_cgroup(self.cpu_cgroup_name) - super().on_finish() - - @staticmethod - def _get_cgroup_names() -> dict[str, str]: - """ - Get the mapping between the subsystem name and the cgroup name. - - :return: a mapping between the subsystem name to the cgroup name - """ - with open("/proc/self/cgroup") as file: - lines = file.readlines() - subsystem_cgroup_map = {} - for line in lines: - line_split = line.rstrip().split(":") - subsystem = line_split[1] - group_name = line_split[2] - subsystem_cgroup_map[subsystem] = group_name - return subsystem_cgroup_map - - def get_process_pid(self) -> int: - if self.process is None: - raise RuntimeError("Process is not started yet") - return self.process.pid diff --git a/airflow/template/templater.py b/airflow/template/templater.py index fc37e18e0cdcd..70be101364953 100644 --- a/airflow/template/templater.py +++ b/airflow/template/templater.py @@ -26,10 +26,12 @@ from airflow.utils.mixins import ResolveMixin if TYPE_CHECKING: + from collections.abc import Mapping + import jinja2 - from airflow import DAG from airflow.models.operator import Operator + from airflow.sdk import DAG from airflow.utils.context import Context @@ -106,7 +108,7 @@ def _do_render_template_fields( self, parent: Any, template_fields: Iterable[str], - context: Context, + context: Mapping[str, Any], jinja_env: jinja2.Environment, seen_oids: set[int], ) -> None: @@ -121,7 +123,7 @@ def _do_render_template_fields( if rendered_content: setattr(parent, attr_name, rendered_content) - def _render(self, template, context, dag: DAG | None = None) -> Any: + def _render(self, template, context, dag=None) -> Any: if dag and dag.render_template_as_native_obj: return render_template_as_native(template, context) return render_template_to_string(template, context) @@ -129,7 +131,7 @@ def _render(self, template, context, dag: DAG | None = None) -> Any: def render_template( self, content: Any, - context: Context, + context: Mapping[str, Any], jinja_env: jinja2.Environment | None = None, seen_oids: set[int] | None = None, ) -> Any: @@ -172,7 +174,8 @@ def render_template( if isinstance(value, ObjectStoragePath): return self._render_object_storage_path(value, context, jinja_env) if isinstance(value, 
ResolveMixin): - return value.resolve(context, include_xcom=True) + # TODO: Task-SDK: Tidy up the typing on template context + return value.resolve(context, include_xcom=True) # type: ignore[arg-type] # Fast path for common built-in collections. if value.__class__ is tuple: @@ -191,7 +194,7 @@ return value def _render_object_storage_path( - self, value: ObjectStoragePath, context: Context, jinja_env: jinja2.Environment + self, value: ObjectStoragePath, context: Mapping[str, Any], jinja_env: jinja2.Environment ) -> ObjectStoragePath: serialized_path = value.serialize() path_version = value.__version__ @@ -201,7 +204,7 @@ def _render_nested_template_fields( self, value: Any, - context: Context, + context: Mapping[str, Any], jinja_env: jinja2.Environment, seen_oids: set[int], ) -> None: diff --git a/airflow/timetables/assets.py b/airflow/timetables/assets.py index b158555590ad5..d69a8e4d80cc0 100644 --- a/airflow/timetables/assets.py +++ b/airflow/timetables/assets.py @@ -92,6 +92,6 @@ def next_dagrun_info( ) def generate_run_id(self, *, run_type: DagRunType, **kwargs: typing.Any) -> str: - if run_type != DagRunType.DATASET_TRIGGERED: + if run_type != DagRunType.ASSET_TRIGGERED: return self.timetable.generate_run_id(run_type=run_type, **kwargs) return super().generate_run_id(run_type=run_type, **kwargs) diff --git a/airflow/timetables/trigger.py b/airflow/timetables/trigger.py index a4666946fa7be..4488a7fdaf619 100644 --- a/airflow/timetables/trigger.py +++ b/airflow/timetables/trigger.py @@ -21,7 +21,7 @@ from airflow.timetables._cron import CronMixin from airflow.timetables.base import DagRunInfo, DataInterval, Timetable -from airflow.utils import timezone +from airflow.utils.timezone import coerce_datetime, utcnow if TYPE_CHECKING: from dateutil.relativedelta import relativedelta @@ -43,6 +43,24 @@ class CronTriggerTimetable(CronMixin, Timetable): for one data interval to pass. Don't pass ``@once`` in here; use ``OnceTimetable`` instead. + + :param cron: cron string that defines when to run + :param timezone: Which timezone to use to interpret the cron string + :param interval: timedelta that defines the data interval start. Default 0. + + *run_immediately* controls, if no *start_time* is given to the DAG, when + the first run of the DAG should be scheduled. It has no effect if there + already exist runs for this DAG. + + * If *True*, immediately run the most recent possible DAG run. + * If *False*, wait to run until the next scheduled time in the future. + * If passed a ``timedelta``, will run the most recent possible DAG run + if that run's ``data_interval_end`` is within timedelta of now. + * If *None*, the timedelta is calculated as 10% of the time between the + most recent past scheduled time and the next scheduled time. E.g. if + running every hour, this would run the previous time if less than 6 + minutes had passed since the previous run time, otherwise it would wait + until the next hour.
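For illustration, a minimal usage sketch (the cron expression and the 15-minute window are arbitrary example values, not defaults)::

    from datetime import timedelta

    from airflow.timetables.trigger import CronTriggerTimetable

    # Hourly schedule with no start_date: if the previous tick was less than
    # 15 minutes ago when the first run is computed, run it immediately,
    # otherwise wait for the next tick.
    timetable = CronTriggerTimetable(
        "0 * * * *",
        timezone="UTC",
        run_immediately=timedelta(minutes=15),
    )
    # then passed to the DAG via ``DAG(..., timetable=timetable)``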
""" def __init__( @@ -51,9 +69,11 @@ def __init__( *, timezone: str | Timezone | FixedTimezone, interval: datetime.timedelta | relativedelta = datetime.timedelta(), + run_immediately: bool | datetime.timedelta = False, ) -> None: super().__init__(cron, timezone) self._interval = interval + self.run_immediately = run_immediately @classmethod def deserialize(cls, data: dict[str, Any]) -> Timetable: @@ -64,7 +84,21 @@ def deserialize(cls, data: dict[str, Any]) -> Timetable: interval = decode_relativedelta(data["interval"]) else: interval = datetime.timedelta(seconds=data["interval"]) - return cls(data["expression"], timezone=decode_timezone(data["timezone"]), interval=interval) + + immediate: bool | datetime.timedelta + if "immediate" not in data: + immediate = False + elif isinstance(data["immediate"], float): + immediate = datetime.timedelta(seconds=data["interval"]) + else: + immediate = data["immediate"] + + return cls( + data["expression"], + timezone=decode_timezone(data["timezone"]), + interval=interval, + run_immediately=immediate, + ) def serialize(self) -> dict[str, Any]: from airflow.serialization.serialized_objects import encode_relativedelta, encode_timezone @@ -75,7 +109,17 @@ def serialize(self) -> dict[str, Any]: else: interval = encode_relativedelta(self._interval) timezone = encode_timezone(self._timezone) - return {"expression": self._expression, "timezone": timezone, "interval": interval} + immediate: bool | float + if isinstance(self.run_immediately, datetime.timedelta): + immediate = self.run_immediately.total_seconds() + else: + immediate = self.run_immediately + return { + "expression": self._expression, + "timezone": timezone, + "interval": interval, + "run_immediately": immediate, + } def infer_manual_data_interval(self, *, run_after: DateTime) -> DataInterval: return DataInterval( @@ -95,13 +139,16 @@ def next_dagrun_info( if last_automated_data_interval is not None: next_start_time = self._get_next(last_automated_data_interval.end) elif restriction.earliest is None: - return None # Don't know where to catch up from, give up. + next_start_time = self._calc_first_run() else: next_start_time = self._align_to_next(restriction.earliest) else: - start_time_candidates = [self._align_to_prev(timezone.coerce_datetime(timezone.utcnow()))] + start_time_candidates = [self._align_to_prev(coerce_datetime(utcnow()))] if last_automated_data_interval is not None: start_time_candidates.append(self._get_next(last_automated_data_interval.end)) + elif restriction.earliest is None: + # Run immediately has no effect if there is restriction on earliest + start_time_candidates.append(self._calc_first_run()) if restriction.earliest is not None: start_time_candidates.append(self._align_to_next(restriction.earliest)) next_start_time = max(start_time_candidates) @@ -113,3 +160,27 @@ def next_dagrun_info( next_start_time - self._interval, # type: ignore[arg-type] next_start_time, ) + + def _calc_first_run(self): + """ + If no start_time is set, determine the start. + + If True, always prefer past run, if False, never. If None, if within 10% of next run, + if timedelta, if within that timedelta from past run. 
+ """ + now = coerce_datetime(utcnow()) + past_run_time = self._align_to_prev(now) + next_run_time = self._align_to_next(now) + if self.run_immediately is True: # not truthy, actually set to True + return past_run_time + + gap_between_runs = next_run_time - past_run_time + gap_to_past = now - past_run_time + if isinstance(self.run_immediately, datetime.timedelta): + buffer_between_runs = self.run_immediately + else: + buffer_between_runs = max(gap_between_runs / 10, datetime.timedelta(minutes=5)) + if gap_to_past <= buffer_between_runs: + return past_run_time + else: + return next_run_time diff --git a/airflow/traces/otel_tracer.py b/airflow/traces/otel_tracer.py index c6d493db1427a..f85d3856ae7b3 100644 --- a/airflow/traces/otel_tracer.py +++ b/airflow/traces/otel_tracer.py @@ -57,7 +57,7 @@ class OtelTrace: When OTEL is enabled, the Trace class will be replaced by this class. """ - def __init__(self, span_exporter: ConsoleSpanExporter | OTLPSpanExporter, tag_string: str | None = None): + def __init__(self, span_exporter: OTLPSpanExporter, tag_string: str | None = None): self.span_exporter = span_exporter self.span_processor = BatchSpanProcessor(self.span_exporter) self.tag_string = tag_string @@ -75,6 +75,10 @@ def get_tracer( ) else: tracer_provider = TracerProvider(resource=resource) + debug = conf.getboolean("traces", "otel_debugging_on") + if debug is True: + log.info("[ConsoleSpanExporter] is being used") + tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) tracer_provider.add_span_processor(self.span_processor) tracer = tracer_provider.get_tracer(component) """ @@ -265,21 +269,16 @@ def get_otel_tracer(cls) -> OtelTrace: """Get OTEL tracer from airflow configuration.""" host = conf.get("traces", "otel_host") port = conf.getint("traces", "otel_port") - debug = conf.getboolean("traces", "otel_debugging_on") ssl_active = conf.getboolean("traces", "otel_ssl_active") tag_string = cls.get_constant_tags() - if debug is True: - log.info("[ConsoleSpanExporter] is being used") - return OtelTrace(span_exporter=ConsoleSpanExporter(), tag_string=tag_string) - else: - protocol = "https" if ssl_active else "http" - endpoint = f"{protocol}://{host}:{port}/v1/traces" - log.info("[OTLPSpanExporter] Connecting to OpenTelemetry Collector at %s", endpoint) - return OtelTrace( - span_exporter=OTLPSpanExporter(endpoint=endpoint, headers={"Content-Type": "application/json"}), - tag_string=tag_string, - ) + protocol = "https" if ssl_active else "http" + endpoint = f"{protocol}://{host}:{port}/v1/traces" + log.info("[OTLPSpanExporter] Connecting to OpenTelemetry Collector at %s", endpoint) + return OtelTrace( + span_exporter=OTLPSpanExporter(endpoint=endpoint, headers={"Content-Type": "application/json"}), + tag_string=tag_string, + ) class AirflowOtelIdGenerator(IdGenerator): diff --git a/airflow/typing_compat.py b/airflow/typing_compat.py index ba96c92d77f0c..946fdce5f26c5 100644 --- a/airflow/typing_compat.py +++ b/airflow/typing_compat.py @@ -24,6 +24,7 @@ "ParamSpec", "Protocol", "Self", + "TypeAlias", "TypedDict", "TypeGuard", "runtime_checkable", @@ -43,9 +44,9 @@ from typing_extensions import Literal # type: ignore[assignment] if sys.version_info >= (3, 10): - from typing import ParamSpec, TypeGuard + from typing import ParamSpec, TypeAlias, TypeGuard else: - from typing_extensions import ParamSpec, TypeGuard + from typing_extensions import ParamSpec, TypeAlias, TypeGuard if sys.version_info >= (3, 11): from typing import Self diff --git a/airflow/ui/.prettierrc 
b/airflow/ui/.prettierrc new file mode 100644 index 0000000000000..93ba8a38a47fe --- /dev/null +++ b/airflow/ui/.prettierrc @@ -0,0 +1,13 @@ +{ + "$schema": "http://json.schemastore.org/prettierrc", + "endOfLine": "lf", + "importOrder": ["", "^(src|openapi)/", "^[./]"], + "importOrderSeparation": true, + "jsxSingleQuote": false, + "plugins": ["@trivago/prettier-plugin-sort-imports"], + "printWidth": 80, + "singleQuote": false, + "tabWidth": 2, + "trailingComma": "all", + "useTabs": false +} diff --git a/airflow/ui/openapi-gen/queries/common.ts b/airflow/ui/openapi-gen/queries/common.ts index 2a05104c8fa70..75a343ce74c7c 100644 --- a/airflow/ui/openapi-gen/queries/common.ts +++ b/airflow/ui/openapi-gen/queries/common.ts @@ -3,14 +3,24 @@ import { UseQueryResult } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, + DagSourceService, + DagWarningService, + DagsService, DashboardService, + EventLogService, MonitorService, + PluginService, + PoolService, + ProviderService, + TaskInstanceService, VariableService, + VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; export type AssetServiceNextRunAssetsDefaultResponse = Awaited< ReturnType @@ -50,6 +60,98 @@ export const UseDashboardServiceHistoricalMetricsKeyFn = ( useDashboardServiceHistoricalMetricsKey, ...(queryKey ?? [{ endDate, startDate }]), ]; +export type DagsServiceRecentDagRunsDefaultResponse = Awaited< + ReturnType +>; +export type DagsServiceRecentDagRunsQueryResult< + TData = DagsServiceRecentDagRunsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagsServiceRecentDagRunsKey = "DagsServiceRecentDagRuns"; +export const UseDagsServiceRecentDagRunsKeyFn = ( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunsLimit?: number; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + onlyActive?: boolean; + owners?: string[]; + paused?: boolean; + tags?: string[]; + } = {}, + queryKey?: Array, +) => [ + useDagsServiceRecentDagRunsKey, + ...(queryKey ?? [ + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }, + ]), +]; +export type BackfillServiceListBackfillsDefaultResponse = Awaited< + ReturnType +>; +export type BackfillServiceListBackfillsQueryResult< + TData = BackfillServiceListBackfillsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useBackfillServiceListBackfillsKey = + "BackfillServiceListBackfills"; +export const UseBackfillServiceListBackfillsKeyFn = ( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: Array, +) => [ + useBackfillServiceListBackfillsKey, + ...(queryKey ?? 
[{ dagId, limit, offset, orderBy }]), +]; +export type BackfillServiceGetBackfillDefaultResponse = Awaited< + ReturnType +>; +export type BackfillServiceGetBackfillQueryResult< + TData = BackfillServiceGetBackfillDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useBackfillServiceGetBackfillKey = "BackfillServiceGetBackfill"; +export const UseBackfillServiceGetBackfillKeyFn = ( + { + backfillId, + }: { + backfillId: string; + }, + queryKey?: Array, +) => [useBackfillServiceGetBackfillKey, ...(queryKey ?? [{ backfillId }])]; export type DagServiceGetDagsDefaultResponse = Awaited< ReturnType >; @@ -100,6 +202,31 @@ export const UseDagServiceGetDagsKeyFn = ( }, ]), ]; +export type DagServiceGetDagTagsDefaultResponse = Awaited< + ReturnType +>; +export type DagServiceGetDagTagsQueryResult< + TData = DagServiceGetDagTagsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagServiceGetDagTagsKey = "DagServiceGetDagTags"; +export const UseDagServiceGetDagTagsKeyFn = ( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: Array, +) => [ + useDagServiceGetDagTagsKey, + ...(queryKey ?? [{ limit, offset, orderBy, tagNamePattern }]), +]; export type DagServiceGetDagDefaultResponse = Awaited< ReturnType >; @@ -176,22 +303,6 @@ export const UseConnectionServiceGetConnectionsKeyFn = ( useConnectionServiceGetConnectionsKey, ...(queryKey ?? [{ limit, offset, orderBy }]), ]; -export type VariableServiceGetVariableDefaultResponse = Awaited< - ReturnType ->; -export type VariableServiceGetVariableQueryResult< - TData = VariableServiceGetVariableDefaultResponse, - TError = unknown, -> = UseQueryResult; -export const useVariableServiceGetVariableKey = "VariableServiceGetVariable"; -export const UseVariableServiceGetVariableKeyFn = ( - { - variableKey, - }: { - variableKey: string; - }, - queryKey?: Array, -) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; export type DagRunServiceGetDagRunDefaultResponse = Awaited< ReturnType >; @@ -210,6 +321,44 @@ export const UseDagRunServiceGetDagRunKeyFn = ( }, queryKey?: Array, ) => [useDagRunServiceGetDagRunKey, ...(queryKey ?? [{ dagId, dagRunId }])]; +export type DagSourceServiceGetDagSourceDefaultResponse = Awaited< + ReturnType +>; +export type DagSourceServiceGetDagSourceQueryResult< + TData = DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagSourceServiceGetDagSourceKey = + "DagSourceServiceGetDagSource"; +export const UseDagSourceServiceGetDagSourceKeyFn = ( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: Array, +) => [ + useDagSourceServiceGetDagSourceKey, + ...(queryKey ?? [{ accept, fileToken }]), +]; +export type EventLogServiceGetEventLogDefaultResponse = Awaited< + ReturnType +>; +export type EventLogServiceGetEventLogQueryResult< + TData = EventLogServiceGetEventLogDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useEventLogServiceGetEventLogKey = "EventLogServiceGetEventLog"; +export const UseEventLogServiceGetEventLogKeyFn = ( + { + eventLogId, + }: { + eventLogId: number; + }, + queryKey?: Array, +) => [useEventLogServiceGetEventLogKey, ...(queryKey ?? 
[{ eventLogId }])]; export type MonitorServiceGetHealthDefaultResponse = Awaited< ReturnType >; @@ -222,15 +371,237 @@ export const UseMonitorServiceGetHealthKeyFn = (queryKey?: Array) => [ useMonitorServiceGetHealthKey, ...(queryKey ?? []), ]; +export type DagWarningServiceListDagWarningsDefaultResponse = Awaited< + ReturnType +>; +export type DagWarningServiceListDagWarningsQueryResult< + TData = DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useDagWarningServiceListDagWarningsKey = + "DagWarningServiceListDagWarnings"; +export const UseDagWarningServiceListDagWarningsKeyFn = ( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: Array, +) => [ + useDagWarningServiceListDagWarningsKey, + ...(queryKey ?? [{ dagId, limit, offset, orderBy, warningType }]), +]; +export type PluginServiceGetPluginsDefaultResponse = Awaited< + ReturnType +>; +export type PluginServiceGetPluginsQueryResult< + TData = PluginServiceGetPluginsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePluginServiceGetPluginsKey = "PluginServiceGetPlugins"; +export const UsePluginServiceGetPluginsKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [usePluginServiceGetPluginsKey, ...(queryKey ?? [{ limit, offset }])]; +export type PoolServiceGetPoolDefaultResponse = Awaited< + ReturnType +>; +export type PoolServiceGetPoolQueryResult< + TData = PoolServiceGetPoolDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePoolServiceGetPoolKey = "PoolServiceGetPool"; +export const UsePoolServiceGetPoolKeyFn = ( + { + poolName, + }: { + poolName: string; + }, + queryKey?: Array, +) => [usePoolServiceGetPoolKey, ...(queryKey ?? [{ poolName }])]; +export type PoolServiceGetPoolsDefaultResponse = Awaited< + ReturnType +>; +export type PoolServiceGetPoolsQueryResult< + TData = PoolServiceGetPoolsDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const usePoolServiceGetPoolsKey = "PoolServiceGetPools"; +export const UsePoolServiceGetPoolsKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [usePoolServiceGetPoolsKey, ...(queryKey ?? [{ limit, offset, orderBy }])]; +export type ProviderServiceGetProvidersDefaultResponse = Awaited< + ReturnType +>; +export type ProviderServiceGetProvidersQueryResult< + TData = ProviderServiceGetProvidersDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useProviderServiceGetProvidersKey = "ProviderServiceGetProviders"; +export const UseProviderServiceGetProvidersKeyFn = ( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: Array, +) => [useProviderServiceGetProvidersKey, ...(queryKey ?? 
[{ limit, offset }])]; +export type TaskInstanceServiceGetTaskInstanceDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetTaskInstanceQueryResult< + TData = TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetTaskInstanceKey = + "TaskInstanceServiceGetTaskInstance"; +export const UseTaskInstanceServiceGetTaskInstanceKeyFn = ( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetTaskInstanceKey, + ...(queryKey ?? [{ dagId, dagRunId, taskId }]), +]; +export type TaskInstanceServiceGetMappedTaskInstanceDefaultResponse = Awaited< + ReturnType +>; +export type TaskInstanceServiceGetMappedTaskInstanceQueryResult< + TData = TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useTaskInstanceServiceGetMappedTaskInstanceKey = + "TaskInstanceServiceGetMappedTaskInstance"; +export const UseTaskInstanceServiceGetMappedTaskInstanceKeyFn = ( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: Array, +) => [ + useTaskInstanceServiceGetMappedTaskInstanceKey, + ...(queryKey ?? [{ dagId, dagRunId, mapIndex, taskId }]), +]; +export type VariableServiceGetVariableDefaultResponse = Awaited< + ReturnType +>; +export type VariableServiceGetVariableQueryResult< + TData = VariableServiceGetVariableDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useVariableServiceGetVariableKey = "VariableServiceGetVariable"; +export const UseVariableServiceGetVariableKeyFn = ( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: Array, +) => [useVariableServiceGetVariableKey, ...(queryKey ?? [{ variableKey }])]; +export type VariableServiceGetVariablesDefaultResponse = Awaited< + ReturnType +>; +export type VariableServiceGetVariablesQueryResult< + TData = VariableServiceGetVariablesDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useVariableServiceGetVariablesKey = "VariableServiceGetVariables"; +export const UseVariableServiceGetVariablesKeyFn = ( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: Array, +) => [ + useVariableServiceGetVariablesKey, + ...(queryKey ?? [{ limit, offset, orderBy }]), +]; +export type VersionServiceGetVersionDefaultResponse = Awaited< + ReturnType +>; +export type VersionServiceGetVersionQueryResult< + TData = VersionServiceGetVersionDefaultResponse, + TError = unknown, +> = UseQueryResult; +export const useVersionServiceGetVersionKey = "VersionServiceGetVersion"; +export const UseVersionServiceGetVersionKeyFn = (queryKey?: Array) => [ + useVersionServiceGetVersionKey, + ...(queryKey ?? 
[]), +]; +export type BackfillServiceCreateBackfillMutationResult = Awaited< + ReturnType +>; +export type PoolServicePostPoolMutationResult = Awaited< + ReturnType +>; export type VariableServicePostVariableMutationResult = Awaited< ReturnType >; +export type BackfillServicePauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceUnpauseBackfillMutationResult = Awaited< + ReturnType +>; +export type BackfillServiceCancelBackfillMutationResult = Awaited< + ReturnType +>; export type DagServicePatchDagsMutationResult = Awaited< ReturnType >; export type DagServicePatchDagMutationResult = Awaited< ReturnType >; +export type DagRunServicePatchDagRunStateMutationResult = Awaited< + ReturnType +>; +export type PoolServicePatchPoolMutationResult = Awaited< + ReturnType +>; export type VariableServicePatchVariableMutationResult = Awaited< ReturnType >; @@ -240,9 +611,12 @@ export type DagServiceDeleteDagMutationResult = Awaited< export type ConnectionServiceDeleteConnectionMutationResult = Awaited< ReturnType >; -export type VariableServiceDeleteVariableMutationResult = Awaited< - ReturnType ->; export type DagRunServiceDeleteDagRunMutationResult = Awaited< ReturnType >; +export type PoolServiceDeletePoolMutationResult = Awaited< + ReturnType +>; +export type VariableServiceDeleteVariableMutationResult = Awaited< + ReturnType +>; diff --git a/airflow/ui/openapi-gen/queries/prefetch.ts b/airflow/ui/openapi-gen/queries/prefetch.ts index 28145dc536470..63e8d4b43132a 100644 --- a/airflow/ui/openapi-gen/queries/prefetch.ts +++ b/airflow/ui/openapi-gen/queries/prefetch.ts @@ -3,14 +3,24 @@ import { type QueryClient } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, + DagSourceService, + DagWarningService, + DagsService, DashboardService, + EventLogService, MonitorService, + PluginService, + PoolService, + ProviderService, + TaskInstanceService, VariableService, + VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -58,6 +68,129 @@ export const prefetchUseDashboardServiceHistoricalMetrics = ( }), queryFn: () => DashboardService.historicalMetrics({ endDate, startDate }), }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. 
+ * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.onlyActive + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagsServiceRecentDagRuns = ( + queryClient: QueryClient, + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunsLimit?: number; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + onlyActive?: boolean; + owners?: string[]; + paused?: boolean; + tags?: string[]; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagsServiceRecentDagRunsKeyFn({ + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }), + queryFn: () => + DagsService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }), + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseBackfillServiceListBackfills = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn({ + dagId, + limit, + offset, + orderBy, + }), + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }), + }); +/** + * Get Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ +export const prefetchUseBackfillServiceGetBackfill = ( + queryClient: QueryClient, + { + backfillId, + }: { + backfillId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn({ backfillId }), + queryFn: () => BackfillService.getBackfill({ backfillId }), + }); /** * Get Dags * Get all DAGs. @@ -128,6 +261,41 @@ export const prefetchUseDagServiceGetDags = ( tags, }), }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagServiceGetDagTags = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn({ + limit, + offset, + orderBy, + tagNamePattern, + }), + queryFn: () => + DagService.getDagTags({ limit, offset, orderBy, tagNamePattern }), + }); /** * Get Dag * Get basic information about a DAG. @@ -218,6 +386,290 @@ export const prefetchUseConnectionServiceGetConnections = ( }), queryFn: () => ConnectionService.getConnections({ limit, offset, orderBy }), }); +/** + * Get Dag Run + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagRunServiceGetDagRun = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagSourceServiceGetDagSource = ( + queryClient: QueryClient, + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn({ + accept, + fileToken, + }), + queryFn: () => DagSourceService.getDagSource({ accept, fileToken }), + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const prefetchUseEventLogServiceGetEventLog = ( + queryClient: QueryClient, + { + eventLogId, + }: { + eventLogId: number; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn({ eventLogId }), + queryFn: () => EventLogService.getEventLog({ eventLogId }), + }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => + queryClient.prefetchQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(), + queryFn: () => MonitorService.getHealth(), + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseDagWarningServiceListDagWarnings = ( + queryClient: QueryClient, + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn({ + dagId, + limit, + offset, + orderBy, + warningType, + }), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }), + }); +/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePluginServiceGetPlugins = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn({ limit, offset }), + queryFn: () => PluginService.getPlugins({ limit, offset }), + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. 
+ * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePoolServiceGetPool = ( + queryClient: QueryClient, + { + poolName, + }: { + poolName: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }), + queryFn: () => PoolService.getPool({ poolName }), + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUsePoolServiceGetPools = ( + queryClient: QueryClient, + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn({ limit, offset, orderBy }), + queryFn: () => PoolService.getPools({ limit, offset, orderBy }), + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const prefetchUseProviderServiceGetProviders = ( + queryClient: QueryClient, + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn({ limit, offset }), + queryFn: () => ProviderService.getProviders({ limit, offset }), + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetTaskInstance = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn({ + dagId, + dagRunId, + taskId, + }), + queryFn: () => + TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }), + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const prefetchUseTaskInstanceServiceGetMappedTaskInstance = ( + queryClient: QueryClient, + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, +) => + queryClient.prefetchQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + queryFn: () => + TaskInstanceService.getMappedTaskInstance({ + dagId, + dagRunId, + mapIndex, + taskId, + }), + }); /** * Get Variable * Get a variable entry. @@ -239,34 +691,43 @@ export const prefetchUseVariableServiceGetVariable = ( queryFn: () => VariableService.getVariable({ variableKey }), }); /** - * Get Dag Run + * Get Variables + * Get all Variables entries. * @param data The data for the request. 
- * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response * @throws ApiError */ -export const prefetchUseDagRunServiceGetDagRun = ( +export const prefetchUseVariableServiceGetVariables = ( queryClient: QueryClient, { - dagId, - dagRunId, + limit, + offset, + orderBy, }: { - dagId: string; - dagRunId: string; - }, + limit?: number; + offset?: number; + orderBy?: string; + } = {}, ) => queryClient.prefetchQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn({ dagId, dagRunId }), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }), + queryKey: Common.UseVariableServiceGetVariablesKeyFn({ + limit, + offset, + orderBy, + }), + queryFn: () => VariableService.getVariables({ limit, offset, orderBy }), }); /** - * Get Health - * @returns HealthInfoSchema Successful Response + * Get Version + * Get version information. + * @returns VersionInfo Successful Response * @throws ApiError */ -export const prefetchUseMonitorServiceGetHealth = (queryClient: QueryClient) => +export const prefetchUseVersionServiceGetVersion = (queryClient: QueryClient) => queryClient.prefetchQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(), - queryFn: () => MonitorService.getHealth(), + queryKey: Common.UseVersionServiceGetVersionKeyFn(), + queryFn: () => VersionService.getVersion(), }); diff --git a/airflow/ui/openapi-gen/queries/queries.ts b/airflow/ui/openapi-gen/queries/queries.ts index 7009a7dee7afe..5f8f649372db6 100644 --- a/airflow/ui/openapi-gen/queries/queries.ts +++ b/airflow/ui/openapi-gen/queries/queries.ts @@ -8,14 +8,33 @@ import { import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, + DagSourceService, + DagWarningService, + DagsService, DashboardService, + EventLogService, MonitorService, + PluginService, + PoolService, + ProviderService, + TaskInstanceService, VariableService, + VersionService, } from "../requests/services.gen"; -import { DAGPatchBody, DagRunState, VariableBody } from "../requests/types.gen"; +import { + BackfillPostBody, + DAGPatchBody, + DAGRunPatchBody, + DagRunState, + DagWarningType, + PoolPatchBody, + PoolPostBody, + VariableBody, +} from "../requests/types.gen"; import * as Common from "./common"; /** @@ -76,6 +95,151 @@ export const useDashboardServiceHistoricalMetrics = < DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options, }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. 
+ * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.onlyActive + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagsServiceRecentDagRuns = < + TData = Common.DagsServiceRecentDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunsLimit?: number; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + onlyActive?: boolean; + owners?: string[]; + paused?: boolean; + tags?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagsServiceRecentDagRunsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }, + queryKey, + ), + queryFn: () => + DagsService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfills = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn( + { dagId, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceGetBackfill = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + backfillId, + }: { + backfillId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn( + { backfillId }, + queryKey, + ), + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, + ...options, + }); /** * Get Dags * Get all DAGs. @@ -155,6 +319,50 @@ export const useDagServiceGetDags = < }) as TData, ...options, }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagTags = < + TData = Common.DagServiceGetDagTagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn( + { limit, offset, orderBy, tagNamePattern }, + queryKey, + ), + queryFn: () => + DagService.getDagTags({ + limit, + offset, + orderBy, + tagNamePattern, + }) as TData, + ...options, + }); /** * Get Dag * Get basic information about a DAG. @@ -273,93 +481,650 @@ export const useConnectionServiceGetConnections = < ...options, }); /** - * Get Variable - * Get a variable entry. + * Get Dag Run * @param data The data for the request. - * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response * @throws ApiError */ -export const useVariableServiceGetVariable = < - TData = Common.VariableServiceGetVariableDefaultResponse, +export const useDagRunServiceGetDagRun = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( { - variableKey, + dagId, + dagRunId, }: { - variableKey: string; + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSource = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( + { accept, fileToken }, + queryKey, + ), + queryFn: () => + DagSourceService.getDagSource({ accept, fileToken }) as TData, + ...options, + }); +/** + * Get Event Log + * @param data The data for the request. 
+ * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLog = < + TData = Common.EventLogServiceGetEventLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + eventLogId, + }: { + eventLogId: number; }, queryKey?: TQueryKey, options?: Omit, "queryKey" | "queryFn">, ) => - useQuery({ - queryKey: Common.UseVariableServiceGetVariableKeyFn( - { variableKey }, - queryKey, - ), - queryFn: () => VariableService.getVariable({ variableKey }) as TData, + useQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn( + { eventLogId }, + queryKey, + ), + queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, + ...options, + }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealth = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarnings = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }) as TData, + ...options, + }); +/** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const usePluginServiceGetPlugins = < + TData = Common.PluginServiceGetPluginsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ...options, + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. 
+ * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPool = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPools = < + TData = Common.PoolServiceGetPoolsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProviders = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + ...options, + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstance = < + TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn( + { dagId, dagRunId, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstance = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstance({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); +/** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariable = < + TData = Common.VariableServiceGetVariableDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + variableKey, + }: { + variableKey: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariableKeyFn( + { variableKey }, + queryKey, + ), + queryFn: () => VariableService.getVariable({ variableKey }) as TData, + ...options, + }); +/** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ +export const useVariableServiceGetVariables = < + TData = Common.VariableServiceGetVariablesDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + VariableService.getVariables({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Version + * Get version information. + * @returns VersionInfo Successful Response + * @throws ApiError + */ +export const useVersionServiceGetVersion = < + TData = Common.VersionServiceGetVersionDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useQuery({ + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, + ...options, + }); +/** + * Create Backfill + * @param data The data for the request. 
+ * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceCreateBackfill = < + TData = Common.BackfillServiceCreateBackfillMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: BackfillPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + BackfillService.createBackfill({ + requestBody, + }) as unknown as Promise, + ...options, + }); +/** + * Post Pool + * Create a Pool. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePostPool = < + TData = Common.PoolServicePostPoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: PoolPostBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: PoolPostBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + PoolService.postPool({ requestBody }) as unknown as Promise, + ...options, + }); +/** + * Post Variable + * Create a variable. + * @param data The data for the request. + * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ +export const useVariableServicePostVariable = < + TData = Common.VariableServicePostVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + requestBody: VariableBody; + }, + TContext + >({ + mutationFn: ({ requestBody }) => + VariableService.postVariable({ + requestBody, + }) as unknown as Promise, ...options, }); /** - * Get Dag Run + * Pause Backfill * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useDagRunServiceGetDagRun = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, +export const useBackfillServicePauseBackfill = < + TData = Common.BackfillServicePauseBackfillMutationResult, TError = unknown, - TQueryKey extends Array = unknown[], + TContext = unknown, >( - { - dagId, - dagRunId, - }: { - dagId: string; - dagRunId: string; - }, - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >, + "mutationFn" + >, ) => - useQuery({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn( - { dagId, dagRunId }, - queryKey, - ), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + useMutation< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.pauseBackfill({ + backfillId, + }) as unknown as Promise, ...options, }); /** - * Get Health - * @returns HealthInfoSchema Successful Response + * Unpause Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useMonitorServiceGetHealth = < - TData = Common.MonitorServiceGetHealthDefaultResponse, +export const useBackfillServiceUnpauseBackfill = < + TData = Common.BackfillServiceUnpauseBackfillMutationResult, TError = unknown, - TQueryKey extends Array = unknown[], + TContext = unknown, >( - queryKey?: TQueryKey, - options?: Omit, "queryKey" | "queryFn">, + options?: Omit< + UseMutationOptions< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >, + "mutationFn" + >, ) => - useQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), - queryFn: () => MonitorService.getHealth() as TData, + useMutation< + TData, + TError, + { + backfillId: unknown; + }, + TContext + >({ + mutationFn: ({ backfillId }) => + BackfillService.unpauseBackfill({ + backfillId, + }) as unknown as Promise, ...options, }); /** - * Post Variable - * Create a variable. + * Cancel Backfill * @param data The data for the request. - * @param data.requestBody - * @returns VariableResponse Successful Response + * @param data.backfillId + * @returns unknown Successful Response * @throws ApiError */ -export const useVariableServicePostVariable = < - TData = Common.VariableServicePostVariableMutationResult, +export const useBackfillServiceCancelBackfill = < + TData = Common.BackfillServiceCancelBackfillMutationResult, TError = unknown, TContext = unknown, >( @@ -368,7 +1133,7 @@ export const useVariableServicePostVariable = < TData, TError, { - requestBody: VariableBody; + backfillId: unknown; }, TContext >, @@ -379,13 +1144,13 @@ export const useVariableServicePostVariable = < TData, TError, { - requestBody: VariableBody; + backfillId: unknown; }, TContext >({ - mutationFn: ({ requestBody }) => - VariableService.postVariable({ - requestBody, + mutationFn: ({ backfillId }) => + BackfillService.cancelBackfill({ + backfillId, }) as unknown as Promise, ...options, }); @@ -522,6 +1287,104 @@ export const useDagServicePatchDag = < }) as unknown as Promise, ...options, }); +/** + * Patch Dag Run State + * Modify a DAG Run. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.requestBody + * @param data.updateMask + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServicePatchDagRunState = < + TData = Common.DagRunServicePatchDagRunStateMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ dagId, dagRunId, requestBody, updateMask }) => + DagRunService.patchDagRunState({ + dagId, + dagRunId, + requestBody, + updateMask, + }) as unknown as Promise, + ...options, + }); +/** + * Patch Pool + * Update a Pool. + * @param data The data for the request. 
+ * @param data.poolName + * @param data.requestBody + * @param data.updateMask + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServicePatchPool = < + TData = Common.PoolServicePatchPoolMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + poolName: string; + requestBody: PoolPatchBody; + updateMask?: string[]; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + poolName: string; + requestBody: PoolPatchBody; + updateMask?: string[]; + }, + TContext + >({ + mutationFn: ({ poolName, requestBody, updateMask }) => + PoolService.patchPool({ + poolName, + requestBody, + updateMask, + }) as unknown as Promise, + ...options, + }); /** * Patch Variable * Update a variable by key. @@ -646,15 +1509,16 @@ export const useConnectionServiceDeleteConnection = < ...options, }); /** - * Delete Variable - * Delete a variable entry. + * Delete Dag Run + * Delete a DAG Run entry. * @param data The data for the request. - * @param data.variableKey + * @param data.dagId + * @param data.dagRunId * @returns void Successful Response * @throws ApiError */ -export const useVariableServiceDeleteVariable = < - TData = Common.VariableServiceDeleteVariableMutationResult, +export const useDagRunServiceDeleteDagRun = < + TData = Common.DagRunServiceDeleteDagRunMutationResult, TError = unknown, TContext = unknown, >( @@ -663,7 +1527,8 @@ export const useVariableServiceDeleteVariable = < TData, TError, { - variableKey: string; + dagId: string; + dagRunId: string; }, TContext >, @@ -674,27 +1539,28 @@ export const useVariableServiceDeleteVariable = < TData, TError, { - variableKey: string; + dagId: string; + dagRunId: string; }, TContext >({ - mutationFn: ({ variableKey }) => - VariableService.deleteVariable({ - variableKey, + mutationFn: ({ dagId, dagRunId }) => + DagRunService.deleteDagRun({ + dagId, + dagRunId, }) as unknown as Promise, ...options, }); /** - * Delete Dag Run - * Delete a DAG Run entry. + * Delete Pool + * Delete a pool entry. * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId + * @param data.poolName * @returns void Successful Response * @throws ApiError */ -export const useDagRunServiceDeleteDagRun = < - TData = Common.DagRunServiceDeleteDagRunMutationResult, +export const usePoolServiceDeletePool = < + TData = Common.PoolServiceDeletePoolMutationResult, TError = unknown, TContext = unknown, >( @@ -703,8 +1569,7 @@ export const useDagRunServiceDeleteDagRun = < TData, TError, { - dagId: string; - dagRunId: string; + poolName: string; }, TContext >, @@ -715,15 +1580,50 @@ export const useDagRunServiceDeleteDagRun = < TData, TError, { - dagId: string; - dagRunId: string; + poolName: string; }, TContext >({ - mutationFn: ({ dagId, dagRunId }) => - DagRunService.deleteDagRun({ - dagId, - dagRunId, + mutationFn: ({ poolName }) => + PoolService.deletePool({ poolName }) as unknown as Promise, + ...options, + }); +/** + * Delete Variable + * Delete a variable entry. + * @param data The data for the request. 
+ * @param data.variableKey + * @returns void Successful Response + * @throws ApiError + */ +export const useVariableServiceDeleteVariable = < + TData = Common.VariableServiceDeleteVariableMutationResult, + TError = unknown, + TContext = unknown, +>( + options?: Omit< + UseMutationOptions< + TData, + TError, + { + variableKey: string; + }, + TContext + >, + "mutationFn" + >, +) => + useMutation< + TData, + TError, + { + variableKey: string; + }, + TContext + >({ + mutationFn: ({ variableKey }) => + VariableService.deleteVariable({ + variableKey, }) as unknown as Promise, ...options, }); diff --git a/airflow/ui/openapi-gen/queries/suspense.ts b/airflow/ui/openapi-gen/queries/suspense.ts index 51e35f321a81c..1222b7f5536c2 100644 --- a/airflow/ui/openapi-gen/queries/suspense.ts +++ b/airflow/ui/openapi-gen/queries/suspense.ts @@ -3,14 +3,24 @@ import { UseQueryOptions, useSuspenseQuery } from "@tanstack/react-query"; import { AssetService, + BackfillService, ConnectionService, DagRunService, DagService, + DagSourceService, + DagWarningService, + DagsService, DashboardService, + EventLogService, MonitorService, + PluginService, + PoolService, + ProviderService, + TaskInstanceService, VariableService, + VersionService, } from "../requests/services.gen"; -import { DagRunState } from "../requests/types.gen"; +import { DagRunState, DagWarningType } from "../requests/types.gen"; import * as Common from "./common"; /** @@ -71,6 +81,151 @@ export const useDashboardServiceHistoricalMetricsSuspense = < DashboardService.historicalMetrics({ endDate, startDate }) as TData, ...options, }); +/** + * Recent Dag Runs + * Get recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.onlyActive + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagsServiceRecentDagRunsSuspense = < + TData = Common.DagsServiceRecentDagRunsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }: { + dagDisplayNamePattern?: string; + dagIdPattern?: string; + dagRunsLimit?: number; + lastDagRunState?: DagRunState; + limit?: number; + offset?: number; + onlyActive?: boolean; + owners?: string[]; + paused?: boolean; + tags?: string[]; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagsServiceRecentDagRunsKeyFn( + { + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }, + queryKey, + ), + queryFn: () => + DagsService.recentDagRuns({ + dagDisplayNamePattern, + dagIdPattern, + dagRunsLimit, + lastDagRunState, + limit, + offset, + onlyActive, + owners, + paused, + tags, + }) as TData, + ...options, + }); +/** + * List Backfills + * @param data The data for the request. 
+ * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceListBackfillsSuspense = < + TData = Common.BackfillServiceListBackfillsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + }: { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseBackfillServiceListBackfillsKeyFn( + { dagId, limit, offset, orderBy }, + queryKey, + ), + queryFn: () => + BackfillService.listBackfills({ dagId, limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ +export const useBackfillServiceGetBackfillSuspense = < + TData = Common.BackfillServiceGetBackfillDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + backfillId, + }: { + backfillId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseBackfillServiceGetBackfillKeyFn( + { backfillId }, + queryKey, + ), + queryFn: () => BackfillService.getBackfill({ backfillId }) as TData, + ...options, + }); /** * Get Dags * Get all DAGs. @@ -150,6 +305,50 @@ export const useDagServiceGetDagsSuspense = < }) as TData, ...options, }); +/** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagServiceGetDagTagsSuspense = < + TData = Common.DagServiceGetDagTagsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + tagNamePattern, + }: { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagServiceGetDagTagsKeyFn( + { limit, offset, orderBy, tagNamePattern }, + queryKey, + ), + queryFn: () => + DagService.getDagTags({ + limit, + offset, + orderBy, + tagNamePattern, + }) as TData, + ...options, + }); /** * Get Dag * Get basic information about a DAG. @@ -267,6 +466,368 @@ export const useConnectionServiceGetConnectionsSuspense = < ConnectionService.getConnections({ limit, offset, orderBy }) as TData, ...options, }); +/** + * Get Dag Run + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response + * @throws ApiError + */ +export const useDagRunServiceGetDagRunSuspense = < + TData = Common.DagRunServiceGetDagRunDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + dagRunId, + }: { + dagId: string; + dagRunId: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagRunServiceGetDagRunKeyFn( + { dagId, dagRunId }, + queryKey, + ), + queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + ...options, + }); +/** + * Get Dag Source + * Get source code using file token. + * @param data The data for the request. 
+ * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response + * @throws ApiError + */ +export const useDagSourceServiceGetDagSourceSuspense = < + TData = Common.DagSourceServiceGetDagSourceDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + accept, + fileToken, + }: { + accept?: string; + fileToken: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagSourceServiceGetDagSourceKeyFn( + { accept, fileToken }, + queryKey, + ), + queryFn: () => + DagSourceService.getDagSource({ accept, fileToken }) as TData, + ...options, + }); +/** + * Get Event Log + * @param data The data for the request. + * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ +export const useEventLogServiceGetEventLogSuspense = < + TData = Common.EventLogServiceGetEventLogDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + eventLogId, + }: { + eventLogId: number; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseEventLogServiceGetEventLogKeyFn( + { eventLogId }, + queryKey, + ), + queryFn: () => EventLogService.getEventLog({ eventLogId }) as TData, + ...options, + }); +/** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ +export const useMonitorServiceGetHealthSuspense = < + TData = Common.MonitorServiceGetHealthDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), + queryFn: () => MonitorService.getHealth() as TData, + ...options, + }); +/** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ +export const useDagWarningServiceListDagWarningsSuspense = < + TData = Common.DagWarningServiceListDagWarningsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + dagId, + limit, + offset, + orderBy, + warningType, + }: { + dagId?: string; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseDagWarningServiceListDagWarningsKeyFn( + { dagId, limit, offset, orderBy, warningType }, + queryKey, + ), + queryFn: () => + DagWarningService.listDagWarnings({ + dagId, + limit, + offset, + orderBy, + warningType, + }) as TData, + ...options, + }); +/** + * Get Plugins + * @param data The data for the request. 
+ * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ +export const usePluginServiceGetPluginsSuspense = < + TData = Common.PluginServiceGetPluginsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePluginServiceGetPluginsKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => PluginService.getPlugins({ limit, offset }) as TData, + ...options, + }); +/** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPoolSuspense = < + TData = Common.PoolServiceGetPoolDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + poolName, + }: { + poolName: string; + }, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePoolServiceGetPoolKeyFn({ poolName }, queryKey), + queryFn: () => PoolService.getPool({ poolName }) as TData, + ...options, + }); +/** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ +export const usePoolServiceGetPoolsSuspense = < + TData = Common.PoolServiceGetPoolsDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + orderBy, + }: { + limit?: number; + offset?: number; + orderBy?: string; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UsePoolServiceGetPoolsKeyFn( + { limit, offset, orderBy }, + queryKey, + ), + queryFn: () => PoolService.getPools({ limit, offset, orderBy }) as TData, + ...options, + }); +/** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ +export const useProviderServiceGetProvidersSuspense = < + TData = Common.ProviderServiceGetProvidersDefaultResponse, + TError = unknown, + TQueryKey extends Array = unknown[], +>( + { + limit, + offset, + }: { + limit?: number; + offset?: number; + } = {}, + queryKey?: TQueryKey, + options?: Omit, "queryKey" | "queryFn">, +) => + useSuspenseQuery({ + queryKey: Common.UseProviderServiceGetProvidersKeyFn( + { limit, offset }, + queryKey, + ), + queryFn: () => ProviderService.getProviders({ limit, offset }) as TData, + ...options, + }); +/** + * Get Task Instance + * Get task instance. + * @param data The data for the request. 
+ * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetTaskInstanceSuspense = < + TData = Common.TaskInstanceServiceGetTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array<unknown> = unknown[], +>( + { + dagId, + dagRunId, + taskId, + }: { + dagId: string; + dagRunId: string; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, +) => + useSuspenseQuery<TData, TError>({ + queryKey: Common.UseTaskInstanceServiceGetTaskInstanceKeyFn( + { dagId, dagRunId, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getTaskInstance({ dagId, dagRunId, taskId }) as TData, + ...options, + }); +/** + * Get Mapped Task Instance + * Get task instance. + * @param data The data for the request. + * @param data.dagId + * @param data.dagRunId + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response + * @throws ApiError + */ +export const useTaskInstanceServiceGetMappedTaskInstanceSuspense = < + TData = Common.TaskInstanceServiceGetMappedTaskInstanceDefaultResponse, + TError = unknown, + TQueryKey extends Array<unknown> = unknown[], +>( + { + dagId, + dagRunId, + mapIndex, + taskId, + }: { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; + }, + queryKey?: TQueryKey, + options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, +) => + useSuspenseQuery<TData, TError>({ + queryKey: Common.UseTaskInstanceServiceGetMappedTaskInstanceKeyFn( + { dagId, dagRunId, mapIndex, taskId }, + queryKey, + ), + queryFn: () => + TaskInstanceService.getMappedTaskInstance({ + dagId, + dagRunId, + mapIndex, + taskId, + }) as TData, + ...options, + }); /** * Get Variable * Get a variable entry. @@ -297,43 +858,49 @@ export const useVariableServiceGetVariableSuspense = < ...options, }); /** - * Get Dag Run + * Get Variables + * Get all Variables entries. * @param data The data for the request. - * @param data.dagId - * @param data.dagRunId - * @returns DAGRunResponse Successful Response + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response * @throws ApiError */ -export const useDagRunServiceGetDagRunSuspense = < - TData = Common.DagRunServiceGetDagRunDefaultResponse, +export const useVariableServiceGetVariablesSuspense = < + TData = Common.VariableServiceGetVariablesDefaultResponse, TError = unknown, TQueryKey extends Array<unknown> = unknown[], >( { - dagId, - dagRunId, + limit, + offset, + orderBy, }: { - dagId: string; - dagRunId: string; - }, + limit?: number; + offset?: number; + orderBy?: string; + } = {}, queryKey?: TQueryKey, options?: Omit<UseQueryOptions<TData, TError>, "queryKey" | "queryFn">, ) => useSuspenseQuery<TData, TError>({ - queryKey: Common.UseDagRunServiceGetDagRunKeyFn( - { dagId, dagRunId }, + queryKey: Common.UseVariableServiceGetVariablesKeyFn( + { limit, offset, orderBy }, queryKey, ), - queryFn: () => DagRunService.getDagRun({ dagId, dagRunId }) as TData, + queryFn: () => + VariableService.getVariables({ limit, offset, orderBy }) as TData, ...options, }); /** - * Get Health - * @returns HealthInfoSchema Successful Response + * Get Version + * Get version information.
+ * @returns VersionInfo Successful Response * @throws ApiError */ -export const useMonitorServiceGetHealthSuspense = < - TData = Common.MonitorServiceGetHealthDefaultResponse, +export const useVersionServiceGetVersionSuspense = < + TData = Common.VersionServiceGetVersionDefaultResponse, TError = unknown, TQueryKey extends Array = unknown[], >( @@ -341,7 +908,7 @@ export const useMonitorServiceGetHealthSuspense = < options?: Omit, "queryKey" | "queryFn">, ) => useSuspenseQuery({ - queryKey: Common.UseMonitorServiceGetHealthKeyFn(queryKey), - queryFn: () => MonitorService.getHealth() as TData, + queryKey: Common.UseVersionServiceGetVersionKeyFn(queryKey), + queryFn: () => VersionService.getVersion() as TData, ...options, }); diff --git a/airflow/ui/openapi-gen/requests/schemas.gen.ts b/airflow/ui/openapi-gen/requests/schemas.gen.ts index 981c8f659a3dd..5624358636cb0 100644 --- a/airflow/ui/openapi-gen/requests/schemas.gen.ts +++ b/airflow/ui/openapi-gen/requests/schemas.gen.ts @@ -1,5 +1,136 @@ // This file is auto-generated by @hey-api/openapi-ts +export const $AppBuilderMenuItemResponse = { + properties: { + name: { + type: "string", + title: "Name", + }, + href: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Href", + }, + category: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Category", + }, + }, + additionalProperties: true, + type: "object", + required: ["name"], + title: "AppBuilderMenuItemResponse", + description: "Serializer for AppBuilder Menu Item responses.", +} as const; + +export const $AppBuilderViewResponse = { + properties: { + name: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Name", + }, + category: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Category", + }, + view: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "View", + }, + label: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Label", + }, + }, + additionalProperties: true, + type: "object", + title: "AppBuilderViewResponse", + description: "Serializer for AppBuilder View responses.", +} as const; + +export const $BackfillPostBody = { + properties: { + dag_id: { + type: "string", + title: "Dag Id", + }, + from_date: { + type: "string", + format: "date-time", + title: "From Date", + }, + to_date: { + type: "string", + format: "date-time", + title: "To Date", + }, + run_backwards: { + type: "boolean", + title: "Run Backwards", + default: false, + }, + dag_run_conf: { + type: "object", + title: "Dag Run Conf", + default: {}, + }, + reprocess_behavior: { + $ref: "#/components/schemas/ReprocessBehavior", + default: "none", + }, + max_active_runs: { + type: "integer", + title: "Max Active Runs", + default: 10, + }, + }, + type: "object", + required: ["dag_id", "from_date", "to_date"], + title: "BackfillPostBody", + description: "Object used for create backfill request.", +} as const; + export const $BaseInfoSchema = { properties: { status: { @@ -37,7 +168,7 @@ export const $ConnectionCollectionResponse = { type: "object", required: ["connections", "total_entries"], title: "ConnectionCollectionResponse", - description: "DAG Collection serializer for responses.", + description: "Connection Collection serializer for responses.", } as const; export const $ConnectionResponse = { @@ -206,18 +337,6 @@ export const $DAGDetailsResponse = { ], title: "Last Expired", }, - scheduler_lock: { - anyOf: [ - { - type: "string", - format: 
"date-time", - }, - { - type: "null", - }, - ], - title: "Scheduler Lock", - }, pickle_id: { anyOf: [ { @@ -383,7 +502,7 @@ export const $DAGDetailsResponse = { ], title: "Dag Run Timeout", }, - dataset_expression: { + asset_expression: { anyOf: [ { type: "object", @@ -392,7 +511,7 @@ export const $DAGDetailsResponse = { type: "null", }, ], - title: "Dataset Expression", + title: "Asset Expression", }, doc_md: { anyOf: [ @@ -440,10 +559,6 @@ export const $DAGDetailsResponse = { ], title: "Is Paused Upon Creation", }, - orientation: { - type: "string", - title: "Orientation", - }, params: { anyOf: [ { @@ -518,7 +633,6 @@ export const $DAGDetailsResponse = { "last_parsed_time", "last_pickled", "last_expired", - "scheduler_lock", "pickle_id", "default_view", "fileloc", @@ -538,12 +652,11 @@ export const $DAGDetailsResponse = { "owners", "catchup", "dag_run_timeout", - "dataset_expression", + "asset_expression", "doc_md", "start_date", "end_date", "is_paused_upon_creation", - "orientation", "params", "render_template_as_native_obj", "template_search_path", @@ -623,18 +736,6 @@ export const $DAGResponse = { ], title: "Last Expired", }, - scheduler_lock: { - anyOf: [ - { - type: "string", - format: "date-time", - }, - { - type: "null", - }, - ], - title: "Scheduler Lock", - }, pickle_id: { anyOf: [ { @@ -800,7 +901,6 @@ export const $DAGResponse = { "last_parsed_time", "last_pickled", "last_expired", - "scheduler_lock", "pickle_id", "default_view", "fileloc", @@ -824,6 +924,25 @@ export const $DAGResponse = { description: "DAG serializer for responses.", } as const; +export const $DAGRunPatchBody = { + properties: { + state: { + $ref: "#/components/schemas/DAGRunPatchStates", + }, + }, + type: "object", + required: ["state"], + title: "DAGRunPatchBody", + description: "DAG Run Serializer for PATCH requests.", +} as const; + +export const $DAGRunPatchStates = { + type: "string", + enum: ["queued", "success", "failed"], + title: "DAGRunPatchStates", + description: "Enum for DAG Run states when updating a DAG Run.", +} as const; + export const $DAGRunResponse = { properties: { run_id: { @@ -1002,31 +1121,20 @@ export const $DAGRunTypes = { type: "integer", title: "Manual", }, - dataset_triggered: { + asset_triggered: { type: "integer", - title: "Dataset Triggered", + title: "Asset Triggered", }, }, type: "object", - required: ["backfill", "scheduled", "manual", "dataset_triggered"], + required: ["backfill", "scheduled", "manual", "asset_triggered"], title: "DAGRunTypes", description: "DAG Run Types for responses.", } as const; -export const $DagProcessorInfoSchema = { +export const $DAGSourceResponse = { properties: { - status: { - anyOf: [ - { - type: "string", - }, - { - type: "null", - }, - ], - title: "Status", - }, - latest_dag_processor_heartbeat: { + content: { anyOf: [ { type: "string", @@ -1035,132 +1143,653 @@ export const $DagProcessorInfoSchema = { type: "null", }, ], - title: "Latest Dag Processor Heartbeat", + title: "Content", }, }, type: "object", - required: ["status", "latest_dag_processor_heartbeat"], - title: "DagProcessorInfoSchema", - description: "Schema for DagProcessor info.", -} as const; - -export const $DagRunState = { - type: "string", - enum: ["queued", "running", "success", "failed"], - title: "DagRunState", - description: `All possible states that a DagRun can be in. 
- -These are "shared" with TaskInstanceState in some parts of the code, -so please ensure that their values always match the ones with the -same name in TaskInstanceState.`, -} as const; - -export const $DagRunTriggeredByType = { - type: "string", - enum: [ - "cli", - "operator", - "rest_api", - "ui", - "test", - "timetable", - "dataset", - "backfill", - ], - title: "DagRunTriggeredByType", - description: "Class with TriggeredBy types for DagRun.", + required: ["content"], + title: "DAGSourceResponse", + description: "DAG Source serializer for responses.", } as const; -export const $DagRunType = { - type: "string", - enum: ["backfill", "scheduled", "manual", "dataset_triggered"], - title: "DagRunType", - description: "Class with DagRun types.", -} as const; - -export const $DagTagPydantic = { +export const $DAGTagCollectionResponse = { properties: { - name: { - type: "string", - title: "Name", - }, - dag_id: { - type: "string", - title: "Dag Id", + tags: { + items: { + type: "string", + }, + type: "array", + title: "Tags", }, - }, - type: "object", - required: ["name", "dag_id"], - title: "DagTagPydantic", - description: - "Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API.", -} as const; - -export const $HTTPExceptionResponse = { - properties: { - detail: { - anyOf: [ - { - type: "string", - }, - { - type: "object", - }, - ], - title: "Detail", + total_entries: { + type: "integer", + title: "Total Entries", }, }, type: "object", - required: ["detail"], - title: "HTTPExceptionResponse", - description: "HTTPException Model used for error response.", + required: ["tags", "total_entries"], + title: "DAGTagCollectionResponse", + description: "DAG Tags Collection serializer for responses.", } as const; -export const $HTTPValidationError = { +export const $DAGWarningCollectionResponse = { properties: { - detail: { + dag_warnings: { items: { - $ref: "#/components/schemas/ValidationError", + $ref: "#/components/schemas/DAGWarningResponse", }, type: "array", - title: "Detail", + title: "Dag Warnings", + }, + total_entries: { + type: "integer", + title: "Total Entries", }, }, type: "object", - title: "HTTPValidationError", + required: ["dag_warnings", "total_entries"], + title: "DAGWarningCollectionResponse", + description: "DAG warning collection serializer for responses.", } as const; -export const $HealthInfoSchema = { +export const $DAGWarningResponse = { properties: { - metadatabase: { - $ref: "#/components/schemas/BaseInfoSchema", + dag_id: { + type: "string", + title: "Dag Id", }, - scheduler: { - $ref: "#/components/schemas/SchedulerInfoSchema", + warning_type: { + $ref: "#/components/schemas/DagWarningType", }, - triggerer: { - $ref: "#/components/schemas/TriggererInfoSchema", + message: { + type: "string", + title: "Message", }, - dag_processor: { - $ref: "#/components/schemas/DagProcessorInfoSchema", + timestamp: { + type: "string", + format: "date-time", + title: "Timestamp", }, }, type: "object", - required: ["metadatabase", "scheduler", "triggerer", "dag_processor"], - title: "HealthInfoSchema", - description: "Schema for the Health endpoint.", + required: ["dag_id", "warning_type", "message", "timestamp"], + title: "DAGWarningResponse", + description: "DAG Warning serializer for responses.", } as const; -export const $HistoricalMetricDataResponse = { +export const $DAGWithLatestDagRunsCollectionResponse = { properties: { - dag_run_types: { - $ref: "#/components/schemas/DAGRunTypes", - }, - dag_run_states: { - $ref: "#/components/schemas/DAGRunStates", 
+ total_entries: { + type: "integer", + title: "Total Entries", }, - task_instance_states: { - $ref: "#/components/schemas/TaskInstanceState", + dags: { + items: { + $ref: "#/components/schemas/DAGWithLatestDagRunsResponse", + }, + type: "array", + title: "Dags", + }, + }, + type: "object", + required: ["total_entries", "dags"], + title: "DAGWithLatestDagRunsCollectionResponse", + description: "DAG with latest dag runs collection response serializer.", +} as const; + +export const $DAGWithLatestDagRunsResponse = { + properties: { + dag_id: { + type: "string", + title: "Dag Id", + }, + dag_display_name: { + type: "string", + title: "Dag Display Name", + }, + is_paused: { + type: "boolean", + title: "Is Paused", + }, + is_active: { + type: "boolean", + title: "Is Active", + }, + last_parsed_time: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Last Parsed Time", + }, + last_pickled: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Last Pickled", + }, + last_expired: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Last Expired", + }, + pickle_id: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Pickle Id", + }, + default_view: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Default View", + }, + fileloc: { + type: "string", + title: "Fileloc", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + timetable_summary: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Timetable Summary", + }, + timetable_description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Timetable Description", + }, + tags: { + items: { + $ref: "#/components/schemas/DagTagPydantic", + }, + type: "array", + title: "Tags", + }, + max_active_tasks: { + type: "integer", + title: "Max Active Tasks", + }, + max_active_runs: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Max Active Runs", + }, + max_consecutive_failed_dag_runs: { + type: "integer", + title: "Max Consecutive Failed Dag Runs", + }, + has_task_concurrency_limits: { + type: "boolean", + title: "Has Task Concurrency Limits", + }, + has_import_errors: { + type: "boolean", + title: "Has Import Errors", + }, + next_dagrun: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Next Dagrun", + }, + next_dagrun_data_interval_start: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Next Dagrun Data Interval Start", + }, + next_dagrun_data_interval_end: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Next Dagrun Data Interval End", + }, + next_dagrun_create_after: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Next Dagrun Create After", + }, + owners: { + items: { + type: "string", + }, + type: "array", + title: "Owners", + }, + latest_dag_runs: { + items: { + $ref: "#/components/schemas/DAGRunResponse", + }, + type: "array", + title: "Latest Dag Runs", + }, + file_token: { + type: "string", + title: "File Token", + description: "Return file token.", + readOnly: true, + }, + }, + type: "object", + required: [ + "dag_id", + "dag_display_name", + "is_paused", + 
"is_active", + "last_parsed_time", + "last_pickled", + "last_expired", + "pickle_id", + "default_view", + "fileloc", + "description", + "timetable_summary", + "timetable_description", + "tags", + "max_active_tasks", + "max_active_runs", + "max_consecutive_failed_dag_runs", + "has_task_concurrency_limits", + "has_import_errors", + "next_dagrun", + "next_dagrun_data_interval_start", + "next_dagrun_data_interval_end", + "next_dagrun_create_after", + "owners", + "latest_dag_runs", + "file_token", + ], + title: "DAGWithLatestDagRunsResponse", + description: "DAG with latest dag runs response serializer.", +} as const; + +export const $DagProcessorInfoSchema = { + properties: { + status: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Status", + }, + latest_dag_processor_heartbeat: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Latest Dag Processor Heartbeat", + }, + }, + type: "object", + required: ["status", "latest_dag_processor_heartbeat"], + title: "DagProcessorInfoSchema", + description: "Schema for DagProcessor info.", +} as const; + +export const $DagRunState = { + type: "string", + enum: ["queued", "running", "success", "failed"], + title: "DagRunState", + description: `All possible states that a DagRun can be in. + +These are "shared" with TaskInstanceState in some parts of the code, +so please ensure that their values always match the ones with the +same name in TaskInstanceState.`, +} as const; + +export const $DagRunTriggeredByType = { + type: "string", + enum: [ + "cli", + "operator", + "rest_api", + "ui", + "test", + "timetable", + "asset", + "backfill", + ], + title: "DagRunTriggeredByType", + description: "Class with TriggeredBy types for DagRun.", +} as const; + +export const $DagRunType = { + type: "string", + enum: ["backfill", "scheduled", "manual", "asset_triggered"], + title: "DagRunType", + description: "Class with DagRun types.", +} as const; + +export const $DagTagPydantic = { + properties: { + name: { + type: "string", + title: "Name", + }, + dag_id: { + type: "string", + title: "Dag Id", + }, + }, + type: "object", + required: ["name", "dag_id"], + title: "DagTagPydantic", + description: + "Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API.", +} as const; + +export const $DagWarningType = { + type: "string", + enum: ["asset conflict", "non-existent pool"], + title: "DagWarningType", + description: `Enum for DAG warning types. 
+ +This is the set of allowable values for the \`\`warning_type\`\` field +in the DagWarning model.`, +} as const; + +export const $EventLogResponse = { + properties: { + event_log_id: { + type: "integer", + title: "Event Log Id", + }, + when: { + type: "string", + format: "date-time", + title: "When", + }, + dag_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Dag Id", + }, + task_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Task Id", + }, + run_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Run Id", + }, + map_index: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Map Index", + }, + try_number: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Try Number", + }, + event: { + type: "string", + title: "Event", + }, + logical_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Logical Date", + }, + owner: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Owner", + }, + extra: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Extra", + }, + }, + type: "object", + required: [ + "event_log_id", + "when", + "dag_id", + "task_id", + "run_id", + "map_index", + "try_number", + "event", + "logical_date", + "owner", + "extra", + ], + title: "EventLogResponse", + description: "Event Log Response.", +} as const; + +export const $FastAPIAppResponse = { + properties: { + app: { + type: "string", + title: "App", + }, + url_prefix: { + type: "string", + title: "Url Prefix", + }, + name: { + type: "string", + title: "Name", + }, + }, + additionalProperties: true, + type: "object", + required: ["app", "url_prefix", "name"], + title: "FastAPIAppResponse", + description: "Serializer for Plugin FastAPI App responses.", +} as const; + +export const $HTTPExceptionResponse = { + properties: { + detail: { + anyOf: [ + { + type: "string", + }, + { + type: "object", + }, + ], + title: "Detail", + }, + }, + type: "object", + required: ["detail"], + title: "HTTPExceptionResponse", + description: "HTTPException Model used for error response.", +} as const; + +export const $HTTPValidationError = { + properties: { + detail: { + items: { + $ref: "#/components/schemas/ValidationError", + }, + type: "array", + title: "Detail", + }, + }, + type: "object", + title: "HTTPValidationError", +} as const; + +export const $HealthInfoSchema = { + properties: { + metadatabase: { + $ref: "#/components/schemas/BaseInfoSchema", + }, + scheduler: { + $ref: "#/components/schemas/SchedulerInfoSchema", + }, + triggerer: { + $ref: "#/components/schemas/TriggererInfoSchema", + }, + dag_processor: { + $ref: "#/components/schemas/DagProcessorInfoSchema", + }, + }, + type: "object", + required: ["metadatabase", "scheduler", "triggerer", "dag_processor"], + title: "HealthInfoSchema", + description: "Schema for the Health endpoint.", +} as const; + +export const $HistoricalMetricDataResponse = { + properties: { + dag_run_types: { + $ref: "#/components/schemas/DAGRunTypes", + }, + dag_run_states: { + $ref: "#/components/schemas/DAGRunStates", + }, + task_instance_states: { + $ref: "#/components/schemas/airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState", }, }, type: "object", @@ -1169,6 +1798,475 @@ export const $HistoricalMetricDataResponse = { description: "Historical Metric Data serializer for responses.", } as const; +export const 
$JobResponse = { + properties: { + id: { + type: "integer", + title: "Id", + }, + dag_id: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Dag Id", + }, + state: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "State", + }, + job_type: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Job Type", + }, + start_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Start Date", + }, + end_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "End Date", + }, + latest_heartbeat: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Latest Heartbeat", + }, + executor_class: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Executor Class", + }, + hostname: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Hostname", + }, + unixname: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Unixname", + }, + }, + type: "object", + required: [ + "id", + "dag_id", + "state", + "job_type", + "start_date", + "end_date", + "latest_heartbeat", + "executor_class", + "hostname", + "unixname", + ], + title: "JobResponse", + description: "Job serializer for responses.", +} as const; + +export const $PluginCollectionResponse = { + properties: { + plugins: { + items: { + $ref: "#/components/schemas/PluginResponse", + }, + type: "array", + title: "Plugins", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["plugins", "total_entries"], + title: "PluginCollectionResponse", + description: "Plugin Collection serializer.", +} as const; + +export const $PluginResponse = { + properties: { + name: { + type: "string", + title: "Name", + }, + macros: { + items: { + type: "string", + }, + type: "array", + title: "Macros", + }, + flask_blueprints: { + items: { + type: "string", + }, + type: "array", + title: "Flask Blueprints", + }, + fastapi_apps: { + items: { + $ref: "#/components/schemas/FastAPIAppResponse", + }, + type: "array", + title: "Fastapi Apps", + }, + appbuilder_views: { + items: { + $ref: "#/components/schemas/AppBuilderViewResponse", + }, + type: "array", + title: "Appbuilder Views", + }, + appbuilder_menu_items: { + items: { + $ref: "#/components/schemas/AppBuilderMenuItemResponse", + }, + type: "array", + title: "Appbuilder Menu Items", + }, + global_operator_extra_links: { + items: { + type: "string", + }, + type: "array", + title: "Global Operator Extra Links", + }, + operator_extra_links: { + items: { + type: "string", + }, + type: "array", + title: "Operator Extra Links", + }, + source: { + type: "string", + title: "Source", + }, + ti_deps: { + items: { + type: "string", + }, + type: "array", + title: "Ti Deps", + }, + listeners: { + items: { + type: "string", + }, + type: "array", + title: "Listeners", + }, + timetables: { + items: { + type: "string", + }, + type: "array", + title: "Timetables", + }, + }, + type: "object", + required: [ + "name", + "macros", + "flask_blueprints", + "fastapi_apps", + "appbuilder_views", + "appbuilder_menu_items", + "global_operator_extra_links", + "operator_extra_links", + "source", + "ti_deps", + "listeners", + "timetables", + ], + title: "PluginResponse", + description: "Plugin serializer.", +} as const; + +export const $PoolCollectionResponse = { + 
properties: { + pools: { + items: { + $ref: "#/components/schemas/PoolResponse", + }, + type: "array", + title: "Pools", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["pools", "total_entries"], + title: "PoolCollectionResponse", + description: "Pool Collection serializer for responses.", +} as const; + +export const $PoolPatchBody = { + properties: { + pool: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Pool", + }, + slots: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Slots", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + include_deferred: { + anyOf: [ + { + type: "boolean", + }, + { + type: "null", + }, + ], + title: "Include Deferred", + }, + }, + type: "object", + title: "PoolPatchBody", + description: "Pool serializer for patch bodies.", +} as const; + +export const $PoolPostBody = { + properties: { + name: { + type: "string", + title: "Name", + }, + slots: { + type: "integer", + title: "Slots", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + include_deferred: { + type: "boolean", + title: "Include Deferred", + default: false, + }, + }, + type: "object", + required: ["name", "slots"], + title: "PoolPostBody", + description: "Pool serializer for post bodies.", +} as const; + +export const $PoolResponse = { + properties: { + name: { + type: "string", + title: "Name", + }, + slots: { + type: "integer", + title: "Slots", + }, + description: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Description", + }, + include_deferred: { + type: "boolean", + title: "Include Deferred", + }, + occupied_slots: { + type: "integer", + title: "Occupied Slots", + }, + running_slots: { + type: "integer", + title: "Running Slots", + }, + queued_slots: { + type: "integer", + title: "Queued Slots", + }, + scheduled_slots: { + type: "integer", + title: "Scheduled Slots", + }, + open_slots: { + type: "integer", + title: "Open Slots", + }, + deferred_slots: { + type: "integer", + title: "Deferred Slots", + }, + }, + type: "object", + required: [ + "name", + "slots", + "description", + "include_deferred", + "occupied_slots", + "running_slots", + "queued_slots", + "scheduled_slots", + "open_slots", + "deferred_slots", + ], + title: "PoolResponse", + description: "Pool serializer for responses.", +} as const; + +export const $ProviderCollectionResponse = { + properties: { + providers: { + items: { + $ref: "#/components/schemas/ProviderResponse", + }, + type: "array", + title: "Providers", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["providers", "total_entries"], + title: "ProviderCollectionResponse", + description: "Provider Collection serializer for responses.", +} as const; + +export const $ProviderResponse = { + properties: { + package_name: { + type: "string", + title: "Package Name", + }, + description: { + type: "string", + title: "Description", + }, + version: { + type: "string", + title: "Version", + }, + }, + type: "object", + required: ["package_name", "description", "version"], + title: "ProviderResponse", + description: "Provider serializer for responses.", +} as const; + +export const $ReprocessBehavior = { + type: "string", + enum: ["failed", "completed", "none"], + title: "ReprocessBehavior", + description: `Internal enum for 
setting reprocess behavior in a backfill. + +:meta private:`, +} as const; + export const $SchedulerInfoSchema = { properties: { status: { @@ -1180,101 +2278,326 @@ export const $SchedulerInfoSchema = { type: "null", }, ], - title: "Status", + title: "Status", + }, + latest_scheduler_heartbeat: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Latest Scheduler Heartbeat", + }, + }, + type: "object", + required: ["status", "latest_scheduler_heartbeat"], + title: "SchedulerInfoSchema", + description: "Schema for Scheduler info.", +} as const; + +export const $TaskInstanceResponse = { + properties: { + task_id: { + type: "string", + title: "Task Id", + }, + dag_id: { + type: "string", + title: "Dag Id", + }, + dag_run_id: { + type: "string", + title: "Dag Run Id", + }, + map_index: { + type: "integer", + title: "Map Index", + }, + logical_date: { + type: "string", + format: "date-time", + title: "Logical Date", + }, + start_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Start Date", + }, + end_date: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "End Date", }, - latest_scheduler_heartbeat: { + duration: { anyOf: [ { - type: "string", + type: "number", }, { type: "null", }, ], - title: "Latest Scheduler Heartbeat", + title: "Duration", }, - }, - type: "object", - required: ["status", "latest_scheduler_heartbeat"], - title: "SchedulerInfoSchema", - description: "Schema for Scheduler info.", -} as const; - -export const $TaskInstanceState = { - properties: { - no_status: { - type: "integer", - title: "No Status", + state: { + anyOf: [ + { + $ref: "#/components/schemas/airflow__utils__state__TaskInstanceState", + }, + { + type: "null", + }, + ], }, - removed: { + try_number: { type: "integer", - title: "Removed", + title: "Try Number", }, - scheduled: { + max_tries: { type: "integer", - title: "Scheduled", + title: "Max Tries", }, - queued: { - type: "integer", - title: "Queued", + task_display_name: { + type: "string", + title: "Task Display Name", }, - running: { - type: "integer", - title: "Running", + hostname: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Hostname", }, - success: { - type: "integer", - title: "Success", + unixname: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Unixname", }, - restarting: { - type: "integer", - title: "Restarting", + pool: { + type: "string", + title: "Pool", }, - failed: { + pool_slots: { type: "integer", - title: "Failed", + title: "Pool Slots", }, - up_for_retry: { - type: "integer", - title: "Up For Retry", + queue: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Queue", }, - up_for_reschedule: { - type: "integer", - title: "Up For Reschedule", + priority_weight: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Priority Weight", }, - upstream_failed: { - type: "integer", - title: "Upstream Failed", + operator: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Operator", }, - skipped: { - type: "integer", - title: "Skipped", + queued_when: { + anyOf: [ + { + type: "string", + format: "date-time", + }, + { + type: "null", + }, + ], + title: "Queued When", }, - deferred: { - type: "integer", - title: "Deferred", + pid: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Pid", + }, + executor: { + anyOf: [ + { + type: "string", 
+ }, + { + type: "null", + }, + ], + title: "Executor", + }, + executor_config: { + type: "string", + title: "Executor Config", + }, + note: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Note", + }, + rendered_map_index: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Rendered Map Index", + }, + rendered_fields: { + type: "object", + title: "Rendered Fields", + default: {}, + }, + trigger: { + anyOf: [ + { + $ref: "#/components/schemas/TriggerResponse", + }, + { + type: "null", + }, + ], + }, + triggerer_job: { + anyOf: [ + { + $ref: "#/components/schemas/JobResponse", + }, + { + type: "null", + }, + ], }, }, type: "object", required: [ - "no_status", - "removed", - "scheduled", - "queued", - "running", - "success", - "restarting", - "failed", - "up_for_retry", - "up_for_reschedule", - "upstream_failed", - "skipped", - "deferred", + "task_id", + "dag_id", + "dag_run_id", + "map_index", + "logical_date", + "start_date", + "end_date", + "duration", + "state", + "try_number", + "max_tries", + "task_display_name", + "hostname", + "unixname", + "pool", + "pool_slots", + "queue", + "priority_weight", + "operator", + "queued_when", + "pid", + "executor", + "executor_config", + "note", + "rendered_map_index", + "trigger", + "triggerer_job", ], - title: "TaskInstanceState", + title: "TaskInstanceResponse", description: "TaskInstance serializer for responses.", } as const; +export const $TriggerResponse = { + properties: { + id: { + type: "integer", + title: "Id", + }, + classpath: { + type: "string", + title: "Classpath", + }, + kwargs: { + type: "string", + title: "Kwargs", + }, + created_date: { + type: "string", + format: "date-time", + title: "Created Date", + }, + triggerer_id: { + anyOf: [ + { + type: "integer", + }, + { + type: "null", + }, + ], + title: "Triggerer Id", + }, + }, + type: "object", + required: ["id", "classpath", "kwargs", "created_date", "triggerer_id"], + title: "TriggerResponse", + description: "Trigger serializer for responses.", +} as const; + export const $TriggererInfoSchema = { properties: { status: { @@ -1371,6 +2694,26 @@ export const $VariableBody = { description: "Variable serializer for bodies.", } as const; +export const $VariableCollectionResponse = { + properties: { + variables: { + items: { + $ref: "#/components/schemas/VariableResponse", + }, + type: "array", + title: "Variables", + }, + total_entries: { + type: "integer", + title: "Total Entries", + }, + }, + type: "object", + required: ["variables", "total_entries"], + title: "VariableCollectionResponse", + description: "Variable Collection serializer for responses.", +} as const; + export const $VariableResponse = { properties: { key: { @@ -1405,3 +2748,125 @@ export const $VariableResponse = { title: "VariableResponse", description: "Variable serializer for responses.", } as const; + +export const $VersionInfo = { + properties: { + version: { + type: "string", + title: "Version", + }, + git_version: { + anyOf: [ + { + type: "string", + }, + { + type: "null", + }, + ], + title: "Git Version", + }, + }, + type: "object", + required: ["version", "git_version"], + title: "VersionInfo", + description: "Version information serializer for responses.", +} as const; + +export const $airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState = + { + properties: { + no_status: { + type: "integer", + title: "No Status", + }, + removed: { + type: "integer", + title: "Removed", + }, + scheduled: { + type: "integer", + title: "Scheduled", 
+ }, + queued: { + type: "integer", + title: "Queued", + }, + running: { + type: "integer", + title: "Running", + }, + success: { + type: "integer", + title: "Success", + }, + restarting: { + type: "integer", + title: "Restarting", + }, + failed: { + type: "integer", + title: "Failed", + }, + up_for_retry: { + type: "integer", + title: "Up For Retry", + }, + up_for_reschedule: { + type: "integer", + title: "Up For Reschedule", + }, + upstream_failed: { + type: "integer", + title: "Upstream Failed", + }, + skipped: { + type: "integer", + title: "Skipped", + }, + deferred: { + type: "integer", + title: "Deferred", + }, + }, + type: "object", + required: [ + "no_status", + "removed", + "scheduled", + "queued", + "running", + "success", + "restarting", + "failed", + "up_for_retry", + "up_for_reschedule", + "upstream_failed", + "skipped", + "deferred", + ], + title: "TaskInstanceState", + description: "TaskInstance serializer for responses.", + } as const; + +export const $airflow__utils__state__TaskInstanceState = { + type: "string", + enum: [ + "removed", + "scheduled", + "queued", + "running", + "success", + "restarting", + "failed", + "up_for_retry", + "up_for_reschedule", + "upstream_failed", + "skipped", + "deferred", + ], + title: "TaskInstanceState", + description: `All possible states that a Task Instance can be in. + +Note that None is also allowed, so always use this in a type hint with Optional.`, +} as const; diff --git a/airflow/ui/openapi-gen/requests/services.gen.ts b/airflow/ui/openapi-gen/requests/services.gen.ts index 7516a625d71b0..e82d0c5146fa8 100644 --- a/airflow/ui/openapi-gen/requests/services.gen.ts +++ b/airflow/ui/openapi-gen/requests/services.gen.ts @@ -7,10 +7,26 @@ import type { NextRunAssetsResponse, HistoricalMetricsData, HistoricalMetricsResponse, + RecentDagRunsData, + RecentDagRunsResponse, + ListBackfillsData, + ListBackfillsResponse, + CreateBackfillData, + CreateBackfillResponse, + GetBackfillData, + GetBackfillResponse, + PauseBackfillData, + PauseBackfillResponse, + UnpauseBackfillData, + UnpauseBackfillResponse, + CancelBackfillData, + CancelBackfillResponse, GetDagsData, GetDagsResponse, PatchDagsData, PatchDagsResponse, + GetDagTagsData, + GetDagTagsResponse, GetDagData, GetDagResponse, PatchDagData, @@ -25,19 +41,48 @@ import type { GetConnectionResponse, GetConnectionsData, GetConnectionsResponse, + GetDagRunData, + GetDagRunResponse, + DeleteDagRunData, + DeleteDagRunResponse, + PatchDagRunStateData, + PatchDagRunStateResponse, + GetDagSourceData, + GetDagSourceResponse, + GetEventLogData, + GetEventLogResponse, + GetHealthResponse, + ListDagWarningsData, + ListDagWarningsResponse, + GetPluginsData, + GetPluginsResponse, + DeletePoolData, + DeletePoolResponse, + GetPoolData, + GetPoolResponse, + PatchPoolData, + PatchPoolResponse, + GetPoolsData, + GetPoolsResponse, + PostPoolData, + PostPoolResponse, + GetProvidersData, + GetProvidersResponse, + GetTaskInstanceData, + GetTaskInstanceResponse, + GetMappedTaskInstanceData, + GetMappedTaskInstanceResponse, DeleteVariableData, DeleteVariableResponse, GetVariableData, GetVariableResponse, PatchVariableData, PatchVariableResponse, + GetVariablesData, + GetVariablesResponse, PostVariableData, PostVariableResponse, - GetDagRunData, - GetDagRunResponse, - DeleteDagRunData, - DeleteDagRunResponse, - GetHealthResponse, + GetVersionResponse, } from "./types.gen"; export class AssetService { @@ -92,6 +137,209 @@ export class DashboardService { } } +export class DagsService { + /** + * Recent Dag Runs + * Get 
recent DAG runs. + * @param data The data for the request. + * @param data.dagRunsLimit + * @param data.limit + * @param data.offset + * @param data.tags + * @param data.owners + * @param data.dagIdPattern + * @param data.dagDisplayNamePattern + * @param data.onlyActive + * @param data.paused + * @param data.lastDagRunState + * @returns DAGWithLatestDagRunsCollectionResponse Successful Response + * @throws ApiError + */ + public static recentDagRuns( + data: RecentDagRunsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/ui/dags/recent_dag_runs", + query: { + dag_runs_limit: data.dagRunsLimit, + limit: data.limit, + offset: data.offset, + tags: data.tags, + owners: data.owners, + dag_id_pattern: data.dagIdPattern, + dag_display_name_pattern: data.dagDisplayNamePattern, + only_active: data.onlyActive, + paused: data.paused, + last_dag_run_state: data.lastDagRunState, + }, + errors: { + 422: "Validation Error", + }, + }); + } +} + +export class BackfillService { + /** + * List Backfills + * @param data The data for the request. + * @param data.dagId + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns unknown Successful Response + * @throws ApiError + */ + public static listBackfills( + data: ListBackfillsData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/backfills/", + query: { + dag_id: data.dagId, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } + + /** + * Create Backfill + * @param data The data for the request. + * @param data.requestBody + * @returns unknown Successful Response + * @throws ApiError + */ + public static createBackfill( + data: CreateBackfillData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/backfills/", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } + + /** + * Get Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ + public static getBackfill( + data: GetBackfillData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/backfills/{backfill_id}", + path: { + backfill_id: data.backfillId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Pause Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ + public static pauseBackfill( + data: PauseBackfillData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PUT", + url: "/public/backfills/{backfill_id}/pause", + path: { + backfill_id: data.backfillId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } + + /** + * Unpause Backfill + * @param data The data for the request. 
+ * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ + public static unpauseBackfill( + data: UnpauseBackfillData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PUT", + url: "/public/backfills/{backfill_id}/unpause", + path: { + backfill_id: data.backfillId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } + + /** + * Cancel Backfill + * @param data The data for the request. + * @param data.backfillId + * @returns unknown Successful Response + * @throws ApiError + */ + public static cancelBackfill( + data: CancelBackfillData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PUT", + url: "/public/backfills/{backfill_id}/cancel", + path: { + backfill_id: data.backfillId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 409: "Conflict", + 422: "Validation Error", + }, + }); + } +} + export class DagService { /** * Get Dags @@ -180,6 +428,37 @@ export class DagService { }); } + /** + * Get Dag Tags + * Get all DAG tags. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @param data.tagNamePattern + * @returns DAGTagCollectionResponse Successful Response + * @throws ApiError + */ + public static getDagTags( + data: GetDagTagsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dags/tags", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + tag_name_pattern: data.tagNamePattern, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } + /** * Get Dag * Get basic information about a DAG. @@ -378,23 +657,24 @@ export class ConnectionService { } } -export class VariableService { +export class DagRunService { /** - * Delete Variable - * Delete a variable entry. + * Get Dag Run * @param data The data for the request. - * @param data.variableKey - * @returns void Successful Response + * @param data.dagId + * @param data.dagRunId + * @returns DAGRunResponse Successful Response * @throws ApiError */ - public static deleteVariable( - data: DeleteVariableData, - ): CancelablePromise { + public static getDagRun( + data: GetDagRunData, + ): CancelablePromise { return __request(OpenAPI, { - method: "DELETE", - url: "/public/variables/{variable_key}", + method: "GET", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", path: { - variable_key: data.variableKey, + dag_id: data.dagId, + dag_run_id: data.dagRunId, }, errors: { 401: "Unauthorized", @@ -406,23 +686,26 @@ export class VariableService { } /** - * Get Variable - * Get a variable entry. + * Delete Dag Run + * Delete a DAG Run entry. * @param data The data for the request. 
- * @param data.variableKey - * @returns VariableResponse Successful Response + * @param data.dagId + * @param data.dagRunId + * @returns void Successful Response * @throws ApiError */ - public static getVariable( - data: GetVariableData, - ): CancelablePromise { + public static deleteDagRun( + data: DeleteDagRunData, + ): CancelablePromise { return __request(OpenAPI, { - method: "GET", - url: "/public/variables/{variable_key}", + method: "DELETE", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", path: { - variable_key: data.variableKey, + dag_id: data.dagId, + dag_run_id: data.dagRunId, }, errors: { + 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", 404: "Not Found", @@ -432,23 +715,25 @@ export class VariableService { } /** - * Patch Variable - * Update a variable by key. + * Patch Dag Run State + * Modify a DAG Run. * @param data The data for the request. - * @param data.variableKey + * @param data.dagId + * @param data.dagRunId * @param data.requestBody * @param data.updateMask - * @returns VariableResponse Successful Response + * @returns DAGRunResponse Successful Response * @throws ApiError */ - public static patchVariable( - data: PatchVariableData, - ): CancelablePromise { + public static patchDagRunState( + data: PatchDagRunStateData, + ): CancelablePromise { return __request(OpenAPI, { method: "PATCH", - url: "/public/variables/{variable_key}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", path: { - variable_key: data.variableKey, + dag_id: data.dagId, + dag_run_id: data.dagRunId, }, query: { update_mask: data.updateMask, @@ -464,50 +749,333 @@ export class VariableService { }, }); } +} +export class DagSourceService { /** - * Post Variable - * Create a variable. + * Get Dag Source + * Get source code using file token. * @param data The data for the request. - * @param data.requestBody - * @returns VariableResponse Successful Response + * @param data.fileToken + * @param data.accept + * @returns DAGSourceResponse Successful Response * @throws ApiError */ - public static postVariable( - data: PostVariableData, - ): CancelablePromise { + public static getDagSource( + data: GetDagSourceData, + ): CancelablePromise { return __request(OpenAPI, { - method: "POST", - url: "/public/variables/", - body: data.requestBody, - mediaType: "application/json", + method: "GET", + url: "/public/dagSources/{file_token}", + path: { + file_token: data.fileToken, + }, + headers: { + accept: data.accept, + }, errors: { + 400: "Bad Request", 401: "Unauthorized", 403: "Forbidden", + 404: "Not Found", + 406: "Not Acceptable", 422: "Validation Error", }, }); } } -export class DagRunService { +export class EventLogService { /** - * Get Dag Run + * Get Event Log + * @param data The data for the request. 
+ * @param data.eventLogId + * @returns EventLogResponse Successful Response + * @throws ApiError + */ + public static getEventLog( + data: GetEventLogData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/eventLogs/{event_log_id}", + path: { + event_log_id: data.eventLogId, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class MonitorService { + /** + * Get Health + * @returns HealthInfoSchema Successful Response + * @throws ApiError + */ + public static getHealth(): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/monitor/health", + }); + } +} + +export class DagWarningService { + /** + * List Dag Warnings + * Get a list of DAG warnings. + * @param data The data for the request. + * @param data.dagId + * @param data.warningType + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns DAGWarningCollectionResponse Successful Response + * @throws ApiError + */ + public static listDagWarnings( + data: ListDagWarningsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/dagWarnings", + query: { + dag_id: data.dagId, + warning_type: data.warningType, + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + +export class PluginService { + /** + * Get Plugins + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns PluginCollectionResponse Successful Response + * @throws ApiError + */ + public static getPlugins( + data: GetPluginsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/plugins/", + query: { + limit: data.limit, + offset: data.offset, + }, + errors: { + 422: "Validation Error", + }, + }); + } +} + +export class PoolService { + /** + * Delete Pool + * Delete a pool entry. + * @param data The data for the request. + * @param data.poolName + * @returns void Successful Response + * @throws ApiError + */ + public static deletePool( + data: DeletePoolData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/pools/{pool_name}", + path: { + pool_name: data.poolName, + }, + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Pool + * Get a pool. + * @param data The data for the request. + * @param data.poolName + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static getPool(data: GetPoolData): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/pools/{pool_name}", + path: { + pool_name: data.poolName, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Patch Pool + * Update a Pool. + * @param data The data for the request. 
+ * @param data.poolName + * @param data.requestBody + * @param data.updateMask + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static patchPool( + data: PatchPoolData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PATCH", + url: "/public/pools/{pool_name}", + path: { + pool_name: data.poolName, + }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", + errors: { + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Pools + * Get all pools entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns PoolCollectionResponse Successful Response + * @throws ApiError + */ + public static getPools( + data: GetPoolsData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/pools/", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Post Pool + * Create a Pool. + * @param data The data for the request. + * @param data.requestBody + * @returns PoolResponse Successful Response + * @throws ApiError + */ + public static postPool( + data: PostPoolData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/pools/", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } +} + +export class ProviderService { + /** + * Get Providers + * Get providers. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @returns ProviderCollectionResponse Successful Response + * @throws ApiError + */ + public static getProviders( + data: GetProvidersData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/providers/", + query: { + limit: data.limit, + offset: data.offset, + }, + errors: { + 422: "Validation Error", + }, + }); + } +} + +export class TaskInstanceService { + /** + * Get Task Instance + * Get task instance. * @param data The data for the request. * @param data.dagId * @param data.dagRunId - * @returns DAGRunResponse Successful Response + * @param data.taskId + * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static getDagRun( - data: GetDagRunData, - ): CancelablePromise { + public static getTaskInstance( + data: GetTaskInstanceData, + ): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, + task_id: data.taskId, }, errors: { 401: "Unauthorized", @@ -519,24 +1087,115 @@ export class DagRunService { } /** - * Delete Dag Run - * Delete a DAG Run entry. + * Get Mapped Task Instance + * Get task instance. * @param data The data for the request. 
* @param data.dagId * @param data.dagRunId - * @returns void Successful Response + * @param data.taskId + * @param data.mapIndex + * @returns TaskInstanceResponse Successful Response * @throws ApiError */ - public static deleteDagRun( - data: DeleteDagRunData, - ): CancelablePromise { + public static getMappedTaskInstance( + data: GetMappedTaskInstanceData, + ): CancelablePromise { return __request(OpenAPI, { - method: "DELETE", - url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}", + method: "GET", + url: "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}", path: { dag_id: data.dagId, dag_run_id: data.dagRunId, + task_id: data.taskId, + map_index: data.mapIndex, }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } +} + +export class VariableService { + /** + * Delete Variable + * Delete a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns void Successful Response + * @throws ApiError + */ + public static deleteVariable( + data: DeleteVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "DELETE", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Get Variable + * Get a variable entry. + * @param data The data for the request. + * @param data.variableKey + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static getVariable( + data: GetVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 404: "Not Found", + 422: "Validation Error", + }, + }); + } + + /** + * Patch Variable + * Update a variable by key. + * @param data The data for the request. + * @param data.variableKey + * @param data.requestBody + * @param data.updateMask + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static patchVariable( + data: PatchVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "PATCH", + url: "/public/variables/{variable_key}", + path: { + variable_key: data.variableKey, + }, + query: { + update_mask: data.updateMask, + }, + body: data.requestBody, + mediaType: "application/json", errors: { 400: "Bad Request", 401: "Unauthorized", @@ -546,18 +1205,72 @@ export class DagRunService { }, }); } + + /** + * Get Variables + * Get all Variables entries. + * @param data The data for the request. + * @param data.limit + * @param data.offset + * @param data.orderBy + * @returns VariableCollectionResponse Successful Response + * @throws ApiError + */ + public static getVariables( + data: GetVariablesData = {}, + ): CancelablePromise { + return __request(OpenAPI, { + method: "GET", + url: "/public/variables/", + query: { + limit: data.limit, + offset: data.offset, + order_by: data.orderBy, + }, + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } + + /** + * Post Variable + * Create a variable. + * @param data The data for the request. 
+ * @param data.requestBody + * @returns VariableResponse Successful Response + * @throws ApiError + */ + public static postVariable( + data: PostVariableData, + ): CancelablePromise { + return __request(OpenAPI, { + method: "POST", + url: "/public/variables/", + body: data.requestBody, + mediaType: "application/json", + errors: { + 401: "Unauthorized", + 403: "Forbidden", + 422: "Validation Error", + }, + }); + } } -export class MonitorService { +export class VersionService { /** - * Get Health - * @returns HealthInfoSchema Successful Response + * Get Version + * Get version information. + * @returns VersionInfo Successful Response * @throws ApiError */ - public static getHealth(): CancelablePromise { + public static getVersion(): CancelablePromise { return __request(OpenAPI, { method: "GET", - url: "/public/monitor/health", + url: "/public/version/", }); } } diff --git a/airflow/ui/openapi-gen/requests/types.gen.ts b/airflow/ui/openapi-gen/requests/types.gen.ts index 5f5ce6da71e3c..2c11dd03cc0af 100644 --- a/airflow/ui/openapi-gen/requests/types.gen.ts +++ b/airflow/ui/openapi-gen/requests/types.gen.ts @@ -1,5 +1,41 @@ // This file is auto-generated by @hey-api/openapi-ts +/** + * Serializer for AppBuilder Menu Item responses. + */ +export type AppBuilderMenuItemResponse = { + name: string; + href?: string | null; + category?: string | null; + [key: string]: unknown | string; +}; + +/** + * Serializer for AppBuilder View responses. + */ +export type AppBuilderViewResponse = { + name?: string | null; + category?: string | null; + view?: string | null; + label?: string | null; + [key: string]: unknown; +}; + +/** + * Object used for create backfill request. + */ +export type BackfillPostBody = { + dag_id: string; + from_date: string; + to_date: string; + run_backwards?: boolean; + dag_run_conf?: { + [key: string]: unknown; + }; + reprocess_behavior?: ReprocessBehavior; + max_active_runs?: number; +}; + /** * Base status field for metadatabase and scheduler. */ @@ -8,7 +44,7 @@ export type BaseInfoSchema = { }; /** - * DAG Collection serializer for responses. + * Connection Collection serializer for responses. */ export type ConnectionCollectionResponse = { connections: Array; @@ -48,7 +84,6 @@ export type DAGDetailsResponse = { last_parsed_time: string | null; last_pickled: string | null; last_expired: string | null; - scheduler_lock: string | null; pickle_id: string | null; default_view: string | null; fileloc: string; @@ -68,14 +103,13 @@ export type DAGDetailsResponse = { owners: Array; catchup: boolean; dag_run_timeout: string | null; - dataset_expression: { + asset_expression: { [key: string]: unknown; } | null; doc_md: string | null; start_date: string | null; end_date: string | null; is_paused_upon_creation: boolean | null; - orientation: string; params: { [key: string]: unknown; } | null; @@ -111,7 +145,6 @@ export type DAGResponse = { last_parsed_time: string | null; last_pickled: string | null; last_expired: string | null; - scheduler_lock: string | null; pickle_id: string | null; default_view: string | null; fileloc: string; @@ -135,6 +168,18 @@ export type DAGResponse = { readonly file_token: string; }; +/** + * DAG Run Serializer for PATCH requests. + */ +export type DAGRunPatchBody = { + state: DAGRunPatchStates; +}; + +/** + * Enum for DAG Run states when updating a DAG Run. + */ +export type DAGRunPatchStates = "queued" | "success" | "failed"; + /** * DAG Run serializer for responses. 
*/ @@ -174,7 +219,83 @@ export type DAGRunTypes = { backfill: number; scheduled: number; manual: number; - dataset_triggered: number; + asset_triggered: number; +}; + +/** + * DAG Source serializer for responses. + */ +export type DAGSourceResponse = { + content: string | null; +}; + +/** + * DAG Tags Collection serializer for responses. + */ +export type DAGTagCollectionResponse = { + tags: Array; + total_entries: number; +}; + +/** + * DAG warning collection serializer for responses. + */ +export type DAGWarningCollectionResponse = { + dag_warnings: Array; + total_entries: number; +}; + +/** + * DAG Warning serializer for responses. + */ +export type DAGWarningResponse = { + dag_id: string; + warning_type: DagWarningType; + message: string; + timestamp: string; +}; + +/** + * DAG with latest dag runs collection response serializer. + */ +export type DAGWithLatestDagRunsCollectionResponse = { + total_entries: number; + dags: Array; +}; + +/** + * DAG with latest dag runs response serializer. + */ +export type DAGWithLatestDagRunsResponse = { + dag_id: string; + dag_display_name: string; + is_paused: boolean; + is_active: boolean; + last_parsed_time: string | null; + last_pickled: string | null; + last_expired: string | null; + pickle_id: string | null; + default_view: string | null; + fileloc: string; + description: string | null; + timetable_summary: string | null; + timetable_description: string | null; + tags: Array; + max_active_tasks: number; + max_active_runs: number | null; + max_consecutive_failed_dag_runs: number; + has_task_concurrency_limits: boolean; + has_import_errors: boolean; + next_dagrun: string | null; + next_dagrun_data_interval_start: string | null; + next_dagrun_data_interval_end: string | null; + next_dagrun_create_after: string | null; + owners: Array; + latest_dag_runs: Array; + /** + * Return file token. + */ + readonly file_token: string; }; /** @@ -204,7 +325,7 @@ export type DagRunTriggeredByType = | "ui" | "test" | "timetable" - | "dataset" + | "asset" | "backfill"; /** @@ -214,7 +335,7 @@ export type DagRunType = | "backfill" | "scheduled" | "manual" - | "dataset_triggered"; + | "asset_triggered"; /** * Serializable representation of the DagTag ORM SqlAlchemyModel used by internal API. @@ -224,6 +345,41 @@ export type DagTagPydantic = { dag_id: string; }; +/** + * Enum for DAG warning types. + * + * This is the set of allowable values for the ``warning_type`` field + * in the DagWarning model. + */ +export type DagWarningType = "asset conflict" | "non-existent pool"; + +/** + * Event Log Response. + */ +export type EventLogResponse = { + event_log_id: number; + when: string; + dag_id: string | null; + task_id: string | null; + run_id: string | null; + map_index: number | null; + try_number: number | null; + event: string; + logical_date: string | null; + owner: string | null; + extra: string | null; +}; + +/** + * Serializer for Plugin FastAPI App responses. + */ +export type FastAPIAppResponse = { + app: string; + url_prefix: string; + name: string; + [key: string]: unknown | string; +}; + /** * HTTPException Model used for error response. */ @@ -255,9 +411,119 @@ export type HealthInfoSchema = { export type HistoricalMetricDataResponse = { dag_run_types: DAGRunTypes; dag_run_states: DAGRunStates; - task_instance_states: TaskInstanceState; + task_instance_states: airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState; +}; + +/** + * Job serializer for responses. 
+ */ +export type JobResponse = { + id: number; + dag_id: string | null; + state: string | null; + job_type: string | null; + start_date: string | null; + end_date: string | null; + latest_heartbeat: string | null; + executor_class: string | null; + hostname: string | null; + unixname: string | null; +}; + +/** + * Plugin Collection serializer. + */ +export type PluginCollectionResponse = { + plugins: Array; + total_entries: number; +}; + +/** + * Plugin serializer. + */ +export type PluginResponse = { + name: string; + macros: Array; + flask_blueprints: Array; + fastapi_apps: Array; + appbuilder_views: Array; + appbuilder_menu_items: Array; + global_operator_extra_links: Array; + operator_extra_links: Array; + source: string; + ti_deps: Array; + listeners: Array; + timetables: Array; +}; + +/** + * Pool Collection serializer for responses. + */ +export type PoolCollectionResponse = { + pools: Array; + total_entries: number; +}; + +/** + * Pool serializer for patch bodies. + */ +export type PoolPatchBody = { + pool?: string | null; + slots?: number | null; + description?: string | null; + include_deferred?: boolean | null; +}; + +/** + * Pool serializer for post bodies. + */ +export type PoolPostBody = { + name: string; + slots: number; + description?: string | null; + include_deferred?: boolean; +}; + +/** + * Pool serializer for responses. + */ +export type PoolResponse = { + name: string; + slots: number; + description: string | null; + include_deferred: boolean; + occupied_slots: number; + running_slots: number; + queued_slots: number; + scheduled_slots: number; + open_slots: number; + deferred_slots: number; +}; + +/** + * Provider Collection serializer for responses. + */ +export type ProviderCollectionResponse = { + providers: Array; + total_entries: number; +}; + +/** + * Provider serializer for responses. + */ +export type ProviderResponse = { + package_name: string; + description: string; + version: string; }; +/** + * Internal enum for setting reprocess behavior in a backfill. + * + * :meta private: + */ +export type ReprocessBehavior = "failed" | "completed" | "none"; + /** * Schema for Scheduler info. */ @@ -269,20 +535,48 @@ export type SchedulerInfoSchema = { /** * TaskInstance serializer for responses. */ -export type TaskInstanceState = { - no_status: number; - removed: number; - scheduled: number; - queued: number; - running: number; - success: number; - restarting: number; - failed: number; - up_for_retry: number; - up_for_reschedule: number; - upstream_failed: number; - skipped: number; - deferred: number; +export type TaskInstanceResponse = { + task_id: string; + dag_id: string; + dag_run_id: string; + map_index: number; + logical_date: string; + start_date: string | null; + end_date: string | null; + duration: number | null; + state: airflow__utils__state__TaskInstanceState | null; + try_number: number; + max_tries: number; + task_display_name: string; + hostname: string | null; + unixname: string | null; + pool: string; + pool_slots: number; + queue: string | null; + priority_weight: number | null; + operator: string | null; + queued_when: string | null; + pid: number | null; + executor: string | null; + executor_config: string; + note: string | null; + rendered_map_index: string | null; + rendered_fields?: { + [key: string]: unknown; + }; + trigger: TriggerResponse | null; + triggerer_job: JobResponse | null; +}; + +/** + * Trigger serializer for responses. 
+ */ +export type TriggerResponse = { + id: number; + classpath: string; + kwargs: string; + created_date: string; + triggerer_id: number | null; }; /** @@ -308,6 +602,14 @@ export type VariableBody = { value: string | null; }; +/** + * Variable Collection serializer for responses. + */ +export type VariableCollectionResponse = { + variables: Array; + total_entries: number; +}; + /** * Variable serializer for responses. */ @@ -317,6 +619,53 @@ export type VariableResponse = { value: string | null; }; +/** + * Version information serializer for responses. + */ +export type VersionInfo = { + version: string; + git_version: string | null; +}; + +/** + * TaskInstance serializer for responses. + */ +export type airflow__api_fastapi__core_api__serializers__dashboard__TaskInstanceState = + { + no_status: number; + removed: number; + scheduled: number; + queued: number; + running: number; + success: number; + restarting: number; + failed: number; + up_for_retry: number; + up_for_reschedule: number; + upstream_failed: number; + skipped: number; + deferred: number; + }; + +/** + * All possible states that a Task Instance can be in. + * + * Note that None is also allowed, so always use this in a type hint with Optional. + */ +export type airflow__utils__state__TaskInstanceState = + | "removed" + | "scheduled" + | "queued" + | "running" + | "success" + | "restarting" + | "failed" + | "up_for_retry" + | "up_for_reschedule" + | "upstream_failed" + | "skipped" + | "deferred"; + export type NextRunAssetsData = { dagId: string; }; @@ -332,6 +681,60 @@ export type HistoricalMetricsData = { export type HistoricalMetricsResponse = HistoricalMetricDataResponse; +export type RecentDagRunsData = { + dagDisplayNamePattern?: string | null; + dagIdPattern?: string | null; + dagRunsLimit?: number; + lastDagRunState?: DagRunState | null; + limit?: number; + offset?: number; + onlyActive?: boolean; + owners?: Array; + paused?: boolean | null; + tags?: Array; +}; + +export type RecentDagRunsResponse = DAGWithLatestDagRunsCollectionResponse; + +export type ListBackfillsData = { + dagId: string; + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type ListBackfillsResponse = unknown; + +export type CreateBackfillData = { + requestBody: BackfillPostBody; +}; + +export type CreateBackfillResponse = unknown; + +export type GetBackfillData = { + backfillId: string; +}; + +export type GetBackfillResponse = unknown; + +export type PauseBackfillData = { + backfillId: unknown; +}; + +export type PauseBackfillResponse = unknown; + +export type UnpauseBackfillData = { + backfillId: unknown; +}; + +export type UnpauseBackfillResponse = unknown; + +export type CancelBackfillData = { + backfillId: unknown; +}; + +export type CancelBackfillResponse = unknown; + export type GetDagsData = { dagDisplayNamePattern?: string | null; dagIdPattern?: string | null; @@ -362,6 +765,15 @@ export type PatchDagsData = { export type PatchDagsResponse = DAGCollectionResponse; +export type GetDagTagsData = { + limit?: number; + offset?: number; + orderBy?: string; + tagNamePattern?: string | null; +}; + +export type GetDagTagsResponse = DAGTagCollectionResponse; + export type GetDagData = { dagId: string; }; @@ -408,59 +820,611 @@ export type GetConnectionsData = { export type GetConnectionsResponse = ConnectionCollectionResponse; -export type DeleteVariableData = { - variableKey: string; +export type GetDagRunData = { + dagId: string; + dagRunId: string; }; -export type DeleteVariableResponse = void; +export type GetDagRunResponse 
= DAGRunResponse; -export type GetVariableData = { - variableKey: string; +export type DeleteDagRunData = { + dagId: string; + dagRunId: string; }; -export type GetVariableResponse = VariableResponse; +export type DeleteDagRunResponse = void; -export type PatchVariableData = { - requestBody: VariableBody; +export type PatchDagRunStateData = { + dagId: string; + dagRunId: string; + requestBody: DAGRunPatchBody; updateMask?: Array | null; - variableKey: string; }; -export type PatchVariableResponse = VariableResponse; +export type PatchDagRunStateResponse = DAGRunResponse; -export type PostVariableData = { - requestBody: VariableBody; +export type GetDagSourceData = { + accept?: string; + fileToken: string; }; -export type PostVariableResponse = VariableResponse; +export type GetDagSourceResponse = DAGSourceResponse; -export type GetDagRunData = { - dagId: string; - dagRunId: string; +export type GetEventLogData = { + eventLogId: number; }; -export type GetDagRunResponse = DAGRunResponse; +export type GetEventLogResponse = EventLogResponse; -export type DeleteDagRunData = { - dagId: string; - dagRunId: string; +export type GetHealthResponse = HealthInfoSchema; + +export type ListDagWarningsData = { + dagId?: string | null; + limit?: number; + offset?: number; + orderBy?: string; + warningType?: DagWarningType | null; }; -export type DeleteDagRunResponse = void; +export type ListDagWarningsResponse = DAGWarningCollectionResponse; -export type GetHealthResponse = HealthInfoSchema; +export type GetPluginsData = { + limit?: number; + offset?: number; +}; + +export type GetPluginsResponse = PluginCollectionResponse; + +export type DeletePoolData = { + poolName: string; +}; + +export type DeletePoolResponse = void; + +export type GetPoolData = { + poolName: string; +}; + +export type GetPoolResponse = PoolResponse; + +export type PatchPoolData = { + poolName: string; + requestBody: PoolPatchBody; + updateMask?: Array | null; +}; + +export type PatchPoolResponse = PoolResponse; + +export type GetPoolsData = { + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type GetPoolsResponse = PoolCollectionResponse; + +export type PostPoolData = { + requestBody: PoolPostBody; +}; + +export type PostPoolResponse = PoolResponse; + +export type GetProvidersData = { + limit?: number; + offset?: number; +}; + +export type GetProvidersResponse = ProviderCollectionResponse; + +export type GetTaskInstanceData = { + dagId: string; + dagRunId: string; + taskId: string; +}; + +export type GetTaskInstanceResponse = TaskInstanceResponse; + +export type GetMappedTaskInstanceData = { + dagId: string; + dagRunId: string; + mapIndex: number; + taskId: string; +}; + +export type GetMappedTaskInstanceResponse = TaskInstanceResponse; + +export type DeleteVariableData = { + variableKey: string; +}; + +export type DeleteVariableResponse = void; + +export type GetVariableData = { + variableKey: string; +}; + +export type GetVariableResponse = VariableResponse; + +export type PatchVariableData = { + requestBody: VariableBody; + updateMask?: Array | null; + variableKey: string; +}; + +export type PatchVariableResponse = VariableResponse; + +export type GetVariablesData = { + limit?: number; + offset?: number; + orderBy?: string; +}; + +export type GetVariablesResponse = VariableCollectionResponse; + +export type PostVariableData = { + requestBody: VariableBody; +}; + +export type PostVariableResponse = VariableResponse; + +export type GetVersionResponse = VersionInfo; export type $OpenApiTs = { 
"/ui/next_run_assets/{dag_id}": { get: { - req: NextRunAssetsData; + req: NextRunAssetsData; + res: { + /** + * Successful Response + */ + 200: { + [key: string]: unknown; + }; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/ui/dashboard/historical_metrics_data": { + get: { + req: HistoricalMetricsData; + res: { + /** + * Successful Response + */ + 200: HistoricalMetricDataResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/ui/dags/recent_dag_runs": { + get: { + req: RecentDagRunsData; + res: { + /** + * Successful Response + */ + 200: DAGWithLatestDagRunsCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/backfills/": { + get: { + req: ListBackfillsData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: CreateBackfillData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/backfills/{backfill_id}": { + get: { + req: GetBackfillData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/backfills/{backfill_id}/pause": { + put: { + req: PauseBackfillData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/backfills/{backfill_id}/unpause": { + put: { + req: UnpauseBackfillData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/backfills/{backfill_id}/cancel": { + put: { + req: CancelBackfillData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Conflict + */ + 409: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/": { + get: { + req: GetDagsData; + res: { + /** + * Successful Response + */ + 200: DAGCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + patch: { + req: PatchDagsData; + res: { + /** + * Successful Response + */ + 200: DAGCollectionResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + 
*/ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/tags": { + get: { + req: GetDagTagsData; + res: { + /** + * Successful Response + */ + 200: DAGTagCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/{dag_id}": { + get: { + req: GetDagData; + res: { + /** + * Successful Response + */ + 200: DAGResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; + patch: { + req: PatchDagData; + res: { + /** + * Successful Response + */ + 200: DAGResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + delete: { + req: DeleteDagData; + res: { + /** + * Successful Response + */ + 200: unknown; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; + }; + "/public/dags/{dag_id}/details": { + get: { + req: GetDagDetailsData; + res: { + /** + * Successful Response + */ + 200: DAGDetailsResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Unprocessable Entity + */ + 422: HTTPExceptionResponse; + }; + }; + }; + "/public/connections/{connection_id}": { + delete: { + req: DeleteConnectionData; + res: { + /** + * Successful Response + */ + 204: void; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + get: { + req: GetConnectionData; res: { /** * Successful Response */ - 200: { - [key: string]: unknown; - }; + 200: ConnectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; /** * Validation Error */ @@ -468,18 +1432,26 @@ export type $OpenApiTs = { }; }; }; - "/ui/dashboard/historical_metrics_data": { + "/public/connections/": { get: { - req: HistoricalMetricsData; + req: GetConnectionsData; res: { /** * Successful Response */ - 200: HistoricalMetricDataResponse; + 200: ConnectionCollectionResponse; /** - * Bad Request + * Unauthorized */ - 400: HTTPExceptionResponse; + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; /** * Validation Error */ @@ -487,27 +1459,39 @@ export type $OpenApiTs = { }; }; }; - "/public/dags/": { + 
"/public/dags/{dag_id}/dagRuns/{dag_run_id}": { get: { - req: GetDagsData; + req: GetDagRunData; res: { /** * Successful Response */ - 200: DAGCollectionResponse; + 200: DAGRunResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - patch: { - req: PatchDagsData; + delete: { + req: DeleteDagRunData; res: { /** * Successful Response */ - 200: DAGCollectionResponse; + 204: void; /** * Bad Request */ @@ -530,15 +1514,13 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - }; - "/public/dags/{dag_id}": { - get: { - req: GetDagData; + patch: { + req: PatchDagRunStateData; res: { /** * Successful Response */ - 200: DAGResponse; + 200: DAGRunResponse; /** * Bad Request */ @@ -556,18 +1538,20 @@ export type $OpenApiTs = { */ 404: HTTPExceptionResponse; /** - * Unprocessable Entity + * Validation Error */ - 422: HTTPExceptionResponse; + 422: HTTPValidationError; }; }; - patch: { - req: PatchDagData; + }; + "/public/dagSources/{file_token}": { + get: { + req: GetDagSourceData; res: { /** * Successful Response */ - 200: DAGResponse; + 200: DAGSourceResponse; /** * Bad Request */ @@ -584,23 +1568,25 @@ export type $OpenApiTs = { * Not Found */ 404: HTTPExceptionResponse; + /** + * Not Acceptable + */ + 406: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - delete: { - req: DeleteDagData; + }; + "/public/eventLogs/{event_log_id}": { + get: { + req: GetEventLogData; res: { /** * Successful Response */ - 200: unknown; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; + 200: EventLogResponse; /** * Unauthorized */ @@ -614,24 +1600,30 @@ export type $OpenApiTs = { */ 404: HTTPExceptionResponse; /** - * Unprocessable Entity + * Validation Error */ - 422: HTTPExceptionResponse; + 422: HTTPValidationError; }; }; }; - "/public/dags/{dag_id}/details": { + "/public/monitor/health": { get: { - req: GetDagDetailsData; res: { /** * Successful Response */ - 200: DAGDetailsResponse; + 200: HealthInfoSchema; + }; + }; + }; + "/public/dagWarnings": { + get: { + req: ListDagWarningsData; + res: { /** - * Bad Request + * Successful Response */ - 400: HTTPExceptionResponse; + 200: DAGWarningCollectionResponse; /** * Unauthorized */ @@ -641,24 +1633,39 @@ export type $OpenApiTs = { */ 403: HTTPExceptionResponse; /** - * Not Found + * Validation Error */ - 404: HTTPExceptionResponse; + 422: HTTPValidationError; + }; + }; + }; + "/public/plugins/": { + get: { + req: GetPluginsData; + res: { /** - * Unprocessable Entity + * Successful Response */ - 422: HTTPExceptionResponse; + 200: PluginCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; }; }; }; - "/public/connections/{connection_id}": { + "/public/pools/{pool_name}": { delete: { - req: DeleteConnectionData; + req: DeletePoolData; res: { /** * Successful Response */ 204: void; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -678,12 +1685,12 @@ export type $OpenApiTs = { }; }; get: { - req: GetConnectionData; + req: GetPoolData; res: { /** * Successful Response */ - 200: ConnectionResponse; + 200: PoolResponse; /** * Unauthorized */ @@ -702,15 +1709,17 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - }; - "/public/connections/": { - get: { - req: GetConnectionsData; + patch: { + req: PatchPoolData; res: { /** * Successful Response */ - 200: ConnectionCollectionResponse; 
+ 200: PoolResponse; + /** + * Bad Request + */ + 400: HTTPExceptionResponse; /** * Unauthorized */ @@ -730,14 +1739,14 @@ export type $OpenApiTs = { }; }; }; - "/public/variables/{variable_key}": { - delete: { - req: DeleteVariableData; + "/public/pools/": { + get: { + req: GetPoolsData; res: { /** * Successful Response */ - 204: void; + 200: PoolCollectionResponse; /** * Unauthorized */ @@ -756,13 +1765,51 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; + post: { + req: PostPoolData; + res: { + /** + * Successful Response + */ + 201: PoolResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/providers/": { get: { - req: GetVariableData; + req: GetProvidersData; res: { /** * Successful Response */ - 200: VariableResponse; + 200: ProviderCollectionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}": { + get: { + req: GetTaskInstanceData; + res: { + /** + * Successful Response + */ + 200: TaskInstanceResponse; /** * Unauthorized */ @@ -781,17 +1828,15 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - patch: { - req: PatchVariableData; + }; + "/public/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/{map_index}": { + get: { + req: GetMappedTaskInstanceData; res: { /** * Successful Response */ - 200: VariableResponse; - /** - * Bad Request - */ - 400: HTTPExceptionResponse; + 200: TaskInstanceResponse; /** * Unauthorized */ @@ -811,14 +1856,14 @@ export type $OpenApiTs = { }; }; }; - "/public/variables/": { - post: { - req: PostVariableData; + "/public/variables/{variable_key}": { + delete: { + req: DeleteVariableData; res: { /** * Successful Response */ - 201: VariableResponse; + 204: void; /** * Unauthorized */ @@ -827,21 +1872,23 @@ export type $OpenApiTs = { * Forbidden */ 403: HTTPExceptionResponse; + /** + * Not Found + */ + 404: HTTPExceptionResponse; /** * Validation Error */ 422: HTTPValidationError; }; }; - }; - "/public/dags/{dag_id}/dagRuns/{dag_run_id}": { get: { - req: GetDagRunData; + req: GetVariableData; res: { /** * Successful Response */ - 200: DAGRunResponse; + 200: VariableResponse; /** * Unauthorized */ @@ -860,13 +1907,13 @@ export type $OpenApiTs = { 422: HTTPValidationError; }; }; - delete: { - req: DeleteDagRunData; + patch: { + req: PatchVariableData; res: { /** * Successful Response */ - 204: void; + 200: VariableResponse; /** * Bad Request */ @@ -890,13 +1937,57 @@ export type $OpenApiTs = { }; }; }; - "/public/monitor/health": { + "/public/variables/": { get: { + req: GetVariablesData; res: { /** * Successful Response */ - 200: HealthInfoSchema; + 200: VariableCollectionResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + post: { + req: PostVariableData; + res: { + /** + * Successful Response + */ + 201: VariableResponse; + /** + * Unauthorized + */ + 401: HTTPExceptionResponse; + /** + * Forbidden + */ + 403: HTTPExceptionResponse; + /** + * Validation Error + */ + 422: HTTPValidationError; + }; + }; + }; + "/public/version/": { + get: { + res: { + /** + * Successful Response + */ + 200: VersionInfo; }; }; }; diff --git a/airflow/ui/package.json b/airflow/ui/package.json index 3ca8d1a06f412..a048b1cf359de 100644 --- 
a/airflow/ui/package.json +++ b/airflow/ui/package.json @@ -17,20 +17,20 @@ }, "dependencies": { "@chakra-ui/anatomy": "^2.2.2", - "@chakra-ui/react": "^2.8.2", + "@chakra-ui/react": "^3.0.2", "@emotion/react": "^11.13.3", - "@emotion/styled": "^11.13.0", "@tanstack/react-query": "^5.52.1", "@tanstack/react-table": "^8.20.1", "axios": "^1.7.7", - "chakra-react-select": "^4.9.2", + "chakra-react-select": "6.0.0-next.2", "dayjs": "^1.11.13", - "framer-motion": "^11.3.29", + "next-themes": "^0.3.0", "react": "^18.3.1", "react-dom": "^18.3.1", "react-icons": "^5.3.0", "react-router-dom": "^6.26.2", - "use-debounce": "^10.0.3" + "use-debounce": "^10.0.3", + "usehooks-ts": "^3.1.0" }, "devDependencies": { "@7nohe/openapi-react-query-codegen": "^1.6.0", diff --git a/airflow/ui/pnpm-lock.yaml b/airflow/ui/pnpm-lock.yaml index 3ceee513bb134..1b2b82cd3e9f9 100644 --- a/airflow/ui/pnpm-lock.yaml +++ b/airflow/ui/pnpm-lock.yaml @@ -12,14 +12,11 @@ importers: specifier: ^2.2.2 version: 2.2.2 '@chakra-ui/react': - specifier: ^2.8.2 - version: 2.8.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + specifier: ^3.0.2 + version: 3.0.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@internationalized/date@3.5.6)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@emotion/react': specifier: ^11.13.3 version: 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@emotion/styled': - specifier: ^11.13.0 - version: 11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1) '@tanstack/react-query': specifier: ^5.52.1 version: 5.52.1(react@18.3.1) @@ -30,14 +27,14 @@ importers: specifier: ^1.7.7 version: 1.7.7 chakra-react-select: - specifier: ^4.9.2 - version: 4.9.2(uzcvocchpeesoxvtkif6ppnvaq) + specifier: 6.0.0-next.2 + version: 6.0.0-next.2(@chakra-ui/react@3.0.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@internationalized/date@3.5.6)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.5)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) dayjs: specifier: ^1.11.13 version: 1.11.13 - framer-motion: - specifier: ^11.3.29 - version: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next-themes: + specifier: ^0.3.0 + version: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: specifier: ^18.3.1 version: 18.3.1 @@ -53,6 +50,9 @@ importers: use-debounce: specifier: ^10.0.3 version: 10.0.3(react@18.3.1) + usehooks-ts: + specifier: ^3.1.0 + version: 3.1.0(react@18.3.1) devDependencies: '@7nohe/openapi-react-query-codegen': specifier: ^1.6.0 @@ -89,7 +89,7 @@ importers: version: 18.3.0 '@vitejs/plugin-react-swc': specifier: ^3.7.0 - version: 3.7.0(vite@5.4.6(@types/node@22.5.4)) + version: 3.7.0(@swc/helpers@0.5.13)(vite@5.4.6(@types/node@22.5.4)) '@vitest/coverage-v8': specifier: ^2.1.1 version: 2.1.1(vitest@2.1.1(@types/node@22.5.4)(happy-dom@15.0.0)) @@ -165,6 +165,15 @@ packages: resolution: {integrity: sha512-9K6xOqeevacvweLGik6LnZCb1fBtCOSIWQs8d096XGeqoLKC33UVMGz9+77Gw44KvbH4pKcQPWo4ZpxkXYj05w==} engines: {node: '>= 16'} + '@ark-ui/anatomy@3.5.0': + resolution: {integrity: 
sha512-KoROLVVT23BvFHcye/GYhG8NJ2CH0C+CaoJhXrkEjvk8pbEx80Xk5NIUy5gL7xmX+LDD7kY5t3NotBqCu+2L2w==} + + '@ark-ui/react@4.1.2': + resolution: {integrity: sha512-7Y8NToONNbfDngQh15GNzn4i4RLJTRRmXm9tXB09a1nKbuYICbxFcX+5IgdhvRudoIFR9r2sCbeEy69I6T13gg==} + peerDependencies: + react: '>=18.0.0' + react-dom: '>=18.0.0' + '@babel/code-frame@7.24.7': resolution: {integrity: sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==} engines: {node: '>=6.9.0'} @@ -254,479 +263,16 @@ packages: '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} - '@chakra-ui/accordion@2.3.1': - resolution: {integrity: sha512-FSXRm8iClFyU+gVaXisOSEw0/4Q+qZbFRiuhIAkVU6Boj0FxAMrlo9a8AV5TuF77rgaHytCdHk0Ng+cyUijrag==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' - react: '>=18' - - '@chakra-ui/alert@2.2.2': - resolution: {integrity: sha512-jHg4LYMRNOJH830ViLuicjb3F+v6iriE/2G5T+Sd0Hna04nukNJ1MxUmBPE+vI22me2dIflfelu2v9wdB6Pojw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - '@chakra-ui/anatomy@2.2.2': resolution: {integrity: sha512-MV6D4VLRIHr4PkW4zMyqfrNS1mPlCTiCXwvYGtDFQYr+xHFfonhAuf9WjsSc0nyp2m0OdkSLnzmVKkZFLo25Tg==} - '@chakra-ui/avatar@2.3.0': - resolution: {integrity: sha512-8gKSyLfygnaotbJbDMHDiJoF38OHXUYVme4gGxZ1fLnQEdPVEaIWfH+NndIjOM0z8S+YEFnT9KyGMUtvPrBk3g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/breadcrumb@2.2.0': - resolution: {integrity: sha512-4cWCG24flYBxjruRi4RJREWTGF74L/KzI2CognAW/d/zWR0CjiScuJhf37Am3LFbCySP6WSoyBOtTIoTA4yLEA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/breakpoint-utils@2.0.8': - resolution: {integrity: sha512-Pq32MlEX9fwb5j5xx8s18zJMARNHlQZH2VH1RZgfgRDpp7DcEgtRW5AInfN5CfqdHLO1dGxA7I3MqEuL5JnIsA==} - - '@chakra-ui/button@2.1.0': - resolution: {integrity: sha512-95CplwlRKmmUXkdEp/21VkEWgnwcx2TOBG6NfYlsuLBDHSLlo5FKIiE2oSi4zXc4TLcopGcWPNcm/NDaSC5pvA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/card@2.2.0': - resolution: {integrity: sha512-xUB/k5MURj4CtPAhdSoXZidUbm8j3hci9vnc+eZJVDqhDOShNlD6QeniQNRPRys4lWAQLCbFcrwL29C8naDi6g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/checkbox@2.3.2': - resolution: {integrity: sha512-85g38JIXMEv6M+AcyIGLh7igNtfpAN6KGQFYxY9tBj0eWvWk4NKQxvqqyVta0bSAyIl1rixNIIezNpNWk2iO4g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/clickable@2.1.0': - resolution: {integrity: sha512-flRA/ClPUGPYabu+/GLREZVZr9j2uyyazCAUHAdrTUEdDYCr31SVGhgh7dgKdtq23bOvAQJpIJjw/0Bs0WvbXw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/close-button@2.1.1': - resolution: {integrity: sha512-gnpENKOanKexswSVpVz7ojZEALl2x5qjLYNqSQGbxz+aP9sOXPfUS56ebyBrre7T7exuWGiFeRwnM0oVeGPaiw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/color-mode@2.2.0': - resolution: {integrity: sha512-niTEA8PALtMWRI9wJ4LL0CSBDo8NBfLNp4GD6/0hstcm3IlbBHTVKxN6HwSaoNYfphDQLxCjT4yG+0BJA5tFpg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/control-box@2.1.0': - resolution: {integrity: sha512-gVrRDyXFdMd8E7rulL0SKeoljkLQiPITFnsyMO8EFHNZ+AHt5wK4LIguYVEq88APqAGZGfHFWXr79RYrNiE3Mg==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/counter@2.1.0': - resolution: {integrity: 
sha512-s6hZAEcWT5zzjNz2JIWUBzRubo9la/oof1W7EKZVVfPYHERnl5e16FmBC79Yfq8p09LQ+aqFKm/etYoJMMgghw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/css-reset@2.3.0': - resolution: {integrity: sha512-cQwwBy5O0jzvl0K7PLTLgp8ijqLPKyuEMiDXwYzl95seD3AoeuoCLyzZcJtVqaUZ573PiBdAbY/IlZcwDOItWg==} - peerDependencies: - '@emotion/react': '>=10.0.35' - react: '>=18' - - '@chakra-ui/descendant@3.1.0': - resolution: {integrity: sha512-VxCIAir08g5w27klLyi7PVo8BxhW4tgU/lxQyujkmi4zx7hT9ZdrcQLAted/dAa+aSIZ14S1oV0Q9lGjsAdxUQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/dom-utils@2.1.0': - resolution: {integrity: sha512-ZmF2qRa1QZ0CMLU8M1zCfmw29DmPNtfjR9iTo74U5FPr3i1aoAh7fbJ4qAlZ197Xw9eAW28tvzQuoVWeL5C7fQ==} - - '@chakra-ui/editable@3.1.0': - resolution: {integrity: sha512-j2JLrUL9wgg4YA6jLlbU88370eCRyor7DZQD9lzpY95tSOXpTljeg3uF9eOmDnCs6fxp3zDWIfkgMm/ExhcGTg==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/event-utils@2.0.8': - resolution: {integrity: sha512-IGM/yGUHS+8TOQrZGpAKOJl/xGBrmRYJrmbHfUE7zrG3PpQyXvbLDP1M+RggkCFVgHlJi2wpYIf0QtQlU0XZfw==} - - '@chakra-ui/focus-lock@2.1.0': - resolution: {integrity: sha512-EmGx4PhWGjm4dpjRqM4Aa+rCWBxP+Rq8Uc/nAVnD4YVqkEhBkrPTpui2lnjsuxqNaZ24fIAZ10cF1hlpemte/w==} - peerDependencies: - react: '>=18' - - '@chakra-ui/form-control@2.2.0': - resolution: {integrity: sha512-wehLC1t4fafCVJ2RvJQT2jyqsAwX7KymmiGqBu7nQoQz8ApTkGABWpo/QwDh3F/dBLrouHDoOvGmYTqft3Mirw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/hooks@2.2.1': - resolution: {integrity: sha512-RQbTnzl6b1tBjbDPf9zGRo9rf/pQMholsOudTxjy4i9GfTfz6kgp5ValGjQm2z7ng6Z31N1cnjZ1AlSzQ//ZfQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/icon@3.2.0': - resolution: {integrity: sha512-xxjGLvlX2Ys4H0iHrI16t74rG9EBcpFvJ3Y3B7KMQTrnW34Kf7Da/UC8J67Gtx85mTHW020ml85SVPKORWNNKQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/image@2.1.0': - resolution: {integrity: sha512-bskumBYKLiLMySIWDGcz0+D9Th0jPvmX6xnRMs4o92tT3Od/bW26lahmV2a2Op2ItXeCmRMY+XxJH5Gy1i46VA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/input@2.1.2': - resolution: {integrity: sha512-GiBbb3EqAA8Ph43yGa6Mc+kUPjh4Spmxp1Pkelr8qtudpc3p2PJOOebLpd90mcqw8UePPa+l6YhhPtp6o0irhw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/layout@2.3.1': - resolution: {integrity: sha512-nXuZ6WRbq0WdgnRgLw+QuxWAHuhDtVX8ElWqcTK+cSMFg/52eVP47czYBE5F35YhnoW2XBwfNoNgZ7+e8Z01Rg==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/lazy-utils@2.0.5': - resolution: {integrity: sha512-UULqw7FBvcckQk2n3iPO56TMJvDsNv0FKZI6PlUNJVaGsPbsYxK/8IQ60vZgaTVPtVcjY6BE+y6zg8u9HOqpyg==} - - '@chakra-ui/live-region@2.1.0': - resolution: {integrity: sha512-ZOxFXwtaLIsXjqnszYYrVuswBhnIHHP+XIgK1vC6DePKtyK590Wg+0J0slDwThUAd4MSSIUa/nNX84x1GMphWw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/media-query@3.3.0': - resolution: {integrity: sha512-IsTGgFLoICVoPRp9ykOgqmdMotJG0CnPsKvGQeSFOB/dZfIujdVb14TYxDU4+MURXry1MhJ7LzZhv+Ml7cr8/g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/menu@2.2.1': - resolution: {integrity: sha512-lJS7XEObzJxsOwWQh7yfG4H8FzFPRP5hVPN/CL+JzytEINCSBvsCDHrYPQGp7jzpCi8vnTqQQGQe0f8dwnXd2g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' - react: '>=18' - - '@chakra-ui/modal@2.3.1': - resolution: {integrity: 
sha512-TQv1ZaiJMZN+rR9DK0snx/OPwmtaGH1HbZtlYt4W4s6CzyK541fxLRTjIXfEzIGpvNW+b6VFuFjbcR78p4DEoQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' - react: '>=18' - react-dom: '>=18' - - '@chakra-ui/number-input@2.1.2': - resolution: {integrity: sha512-pfOdX02sqUN0qC2ysuvgVDiws7xZ20XDIlcNhva55Jgm095xjm8eVdIBfNm3SFbSUNxyXvLTW/YQanX74tKmuA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/number-utils@2.0.7': - resolution: {integrity: sha512-yOGxBjXNvLTBvQyhMDqGU0Oj26s91mbAlqKHiuw737AXHt0aPllOthVUqQMeaYLwLCjGMg0jtI7JReRzyi94Dg==} - - '@chakra-ui/object-utils@2.1.0': - resolution: {integrity: sha512-tgIZOgLHaoti5PYGPTwK3t/cqtcycW0owaiOXoZOcpwwX/vlVb+H1jFsQyWiiwQVPt9RkoSLtxzXamx+aHH+bQ==} - - '@chakra-ui/pin-input@2.1.0': - resolution: {integrity: sha512-x4vBqLStDxJFMt+jdAHHS8jbh294O53CPQJoL4g228P513rHylV/uPscYUHrVJXRxsHfRztQO9k45jjTYaPRMw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/popover@2.2.1': - resolution: {integrity: sha512-K+2ai2dD0ljvJnlrzesCDT9mNzLifE3noGKZ3QwLqd/K34Ym1W/0aL1ERSynrcG78NKoXS54SdEzkhCZ4Gn/Zg==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' - react: '>=18' - - '@chakra-ui/popper@3.1.0': - resolution: {integrity: sha512-ciDdpdYbeFG7og6/6J8lkTFxsSvwTdMLFkpVylAF6VNC22jssiWfquj2eyD4rJnzkRFPvIWJq8hvbfhsm+AjSg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/portal@2.1.0': - resolution: {integrity: sha512-9q9KWf6SArEcIq1gGofNcFPSWEyl+MfJjEUg/un1SMlQjaROOh3zYr+6JAwvcORiX7tyHosnmWC3d3wI2aPSQg==} - peerDependencies: - react: '>=18' - react-dom: '>=18' - - '@chakra-ui/progress@2.2.0': - resolution: {integrity: sha512-qUXuKbuhN60EzDD9mHR7B67D7p/ZqNS2Aze4Pbl1qGGZfulPW0PY8Rof32qDtttDQBkzQIzFGE8d9QpAemToIQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/provider@2.4.2': - resolution: {integrity: sha512-w0Tef5ZCJK1mlJorcSjItCSbyvVuqpvyWdxZiVQmE6fvSJR83wZof42ux0+sfWD+I7rHSfj+f9nzhNaEWClysw==} - peerDependencies: - '@emotion/react': ^11.0.0 - '@emotion/styled': ^11.0.0 - react: '>=18' - react-dom: '>=18' - - '@chakra-ui/radio@2.1.2': - resolution: {integrity: sha512-n10M46wJrMGbonaghvSRnZ9ToTv/q76Szz284gv4QUWvyljQACcGrXIONUnQ3BIwbOfkRqSk7Xl/JgZtVfll+w==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/react-children-utils@2.0.6': - resolution: {integrity: sha512-QVR2RC7QsOsbWwEnq9YduhpqSFnZGvjjGREV8ygKi8ADhXh93C8azLECCUVgRJF2Wc+So1fgxmjLcbZfY2VmBA==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-context@2.1.0': - resolution: {integrity: sha512-iahyStvzQ4AOwKwdPReLGfDesGG+vWJfEsn0X/NoGph/SkN+HXtv2sCfYFFR9k7bb+Kvc6YfpLlSuLvKMHi2+w==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-env@3.1.0': - resolution: {integrity: sha512-Vr96GV2LNBth3+IKzr/rq1IcnkXv+MLmwjQH6C8BRtn3sNskgDFD5vLkVXcEhagzZMCh8FR3V/bzZPojBOyNhw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-types@2.0.7': - resolution: {integrity: sha512-12zv2qIZ8EHwiytggtGvo4iLT0APris7T0qaAWqzpUGS0cdUtR8W+V1BJ5Ocq+7tA6dzQ/7+w5hmXih61TuhWQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-animation-state@2.1.0': - resolution: {integrity: sha512-CFZkQU3gmDBwhqy0vC1ryf90BVHxVN8cTLpSyCpdmExUEtSEInSCGMydj2fvn7QXsz/za8JNdO2xxgJwxpLMtg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-callback-ref@2.1.0': - resolution: {integrity: sha512-efnJrBtGDa4YaxDzDE90EnKD3Vkh5a1t3w7PhnRQmsphLy3g2UieasoKTlT2Hn118TwDjIv5ZjHJW6HbzXA9wQ==} - peerDependencies: - react: 
'>=18' - - '@chakra-ui/react-use-controllable-state@2.1.0': - resolution: {integrity: sha512-QR/8fKNokxZUs4PfxjXuwl0fj/d71WPrmLJvEpCTkHjnzu7LnYvzoe2wB867IdooQJL0G1zBxl0Dq+6W1P3jpg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-disclosure@2.1.0': - resolution: {integrity: sha512-Ax4pmxA9LBGMyEZJhhUZobg9C0t3qFE4jVF1tGBsrLDcdBeLR9fwOogIPY9Hf0/wqSlAryAimICbr5hkpa5GSw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-event-listener@2.1.0': - resolution: {integrity: sha512-U5greryDLS8ISP69DKDsYcsXRtAdnTQT+jjIlRYZ49K/XhUR/AqVZCK5BkR1spTDmO9H8SPhgeNKI70ODuDU/Q==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-focus-effect@2.1.0': - resolution: {integrity: sha512-xzVboNy7J64xveLcxTIJ3jv+lUJKDwRM7Szwn9tNzUIPD94O3qwjV7DDCUzN2490nSYDF4OBMt/wuDBtaR3kUQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-focus-on-pointer-down@2.1.0': - resolution: {integrity: sha512-2jzrUZ+aiCG/cfanrolsnSMDykCAbv9EK/4iUyZno6BYb3vziucmvgKuoXbMPAzWNtwUwtuMhkby8rc61Ue+Lg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-interval@2.1.0': - resolution: {integrity: sha512-8iWj+I/+A0J08pgEXP1J1flcvhLBHkk0ln7ZvGIyXiEyM6XagOTJpwNhiu+Bmk59t3HoV/VyvyJTa+44sEApuw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-latest-ref@2.1.0': - resolution: {integrity: sha512-m0kxuIYqoYB0va9Z2aW4xP/5b7BzlDeWwyXCH6QpT2PpW3/281L3hLCm1G0eOUcdVlayqrQqOeD6Mglq+5/xoQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-merge-refs@2.1.0': - resolution: {integrity: sha512-lERa6AWF1cjEtWSGjxWTaSMvneccnAVH4V4ozh8SYiN9fSPZLlSG3kNxfNzdFvMEhM7dnP60vynF7WjGdTgQbQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-outside-click@2.2.0': - resolution: {integrity: sha512-PNX+s/JEaMneijbgAM4iFL+f3m1ga9+6QK0E5Yh4s8KZJQ/bLwZzdhMz8J/+mL+XEXQ5J0N8ivZN28B82N1kNw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-pan-event@2.1.0': - resolution: {integrity: sha512-xmL2qOHiXqfcj0q7ZK5s9UjTh4Gz0/gL9jcWPA6GVf+A0Od5imEDa/Vz+533yQKWiNSm1QGrIj0eJAokc7O4fg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-previous@2.1.0': - resolution: {integrity: sha512-pjxGwue1hX8AFcmjZ2XfrQtIJgqbTF3Qs1Dy3d1krC77dEsiCUbQ9GzOBfDc8pfd60DrB5N2tg5JyHbypqh0Sg==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-safe-layout-effect@2.1.0': - resolution: {integrity: sha512-Knbrrx/bcPwVS1TorFdzrK/zWA8yuU/eaXDkNj24IrKoRlQrSBFarcgAEzlCHtzuhufP3OULPkELTzz91b0tCw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-size@2.1.0': - resolution: {integrity: sha512-tbLqrQhbnqOjzTaMlYytp7wY8BW1JpL78iG7Ru1DlV4EWGiAmXFGvtnEt9HftU0NJ0aJyjgymkxfVGI55/1Z4A==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-timeout@2.1.0': - resolution: {integrity: sha512-cFN0sobKMM9hXUhyCofx3/Mjlzah6ADaEl/AXl5Y+GawB5rgedgAcu2ErAgarEkwvsKdP6c68CKjQ9dmTQlJxQ==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-use-update-effect@2.1.0': - resolution: {integrity: sha512-ND4Q23tETaR2Qd3zwCKYOOS1dfssojPLJMLvUtUbW5M9uW1ejYWgGUobeAiOVfSplownG8QYMmHTP86p/v0lbA==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react-utils@2.0.12': - resolution: {integrity: sha512-GbSfVb283+YA3kA8w8xWmzbjNWk14uhNpntnipHCftBibl0lxtQ9YqMFQLwuFOO0U2gYVocszqqDWX+XNKq9hw==} - peerDependencies: - react: '>=18' - - '@chakra-ui/react@2.8.2': - resolution: {integrity: sha512-Hn0moyxxyCDKuR9ywYpqgX8dvjqwu9ArwpIb9wHNYjnODETjLwazgNIliCVBRcJvysGRiV51U2/JtJVrpeCjUQ==} - peerDependencies: - '@emotion/react': ^11.0.0 - '@emotion/styled': ^11.0.0 - framer-motion: '>=4.0.0' - react: 
'>=18' - react-dom: '>=18' - - '@chakra-ui/select@2.1.2': - resolution: {integrity: sha512-ZwCb7LqKCVLJhru3DXvKXpZ7Pbu1TDZ7N0PdQ0Zj1oyVLJyrpef1u9HR5u0amOpqcH++Ugt0f5JSmirjNlctjA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/shared-utils@2.0.5': - resolution: {integrity: sha512-4/Wur0FqDov7Y0nCXl7HbHzCg4aq86h+SXdoUeuCMD3dSj7dpsVnStLYhng1vxvlbUnLpdF4oz5Myt3i/a7N3Q==} - - '@chakra-ui/skeleton@2.1.0': - resolution: {integrity: sha512-JNRuMPpdZGd6zFVKjVQ0iusu3tXAdI29n4ZENYwAJEMf/fN0l12sVeirOxkJ7oEL0yOx2AgEYFSKdbcAgfUsAQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/skip-nav@2.1.0': - resolution: {integrity: sha512-Hk+FG+vadBSH0/7hwp9LJnLjkO0RPGnx7gBJWI4/SpoJf3e4tZlWYtwGj0toYY4aGKl93jVghuwGbDBEMoHDug==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/slider@2.1.0': - resolution: {integrity: sha512-lUOBcLMCnFZiA/s2NONXhELJh6sY5WtbRykPtclGfynqqOo47lwWJx+VP7xaeuhDOPcWSSecWc9Y1BfPOCz9cQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/spinner@2.1.0': - resolution: {integrity: sha512-hczbnoXt+MMv/d3gE+hjQhmkzLiKuoTo42YhUG7Bs9OSv2lg1fZHW1fGNRFP3wTi6OIbD044U1P9HK+AOgFH3g==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/stat@2.1.1': - resolution: {integrity: sha512-LDn0d/LXQNbAn2KaR3F1zivsZCewY4Jsy1qShmfBMKwn6rI8yVlbvu6SiA3OpHS0FhxbsZxQI6HefEoIgtqY6Q==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/stepper@2.3.1': - resolution: {integrity: sha512-ky77lZbW60zYkSXhYz7kbItUpAQfEdycT0Q4bkHLxfqbuiGMf8OmgZOQkOB9uM4v0zPwy2HXhe0vq4Dd0xa55Q==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/styled-system@2.9.2': - resolution: {integrity: sha512-To/Z92oHpIE+4nk11uVMWqo2GGRS86coeMmjxtpnErmWRdLcp1WVCVRAvn+ZwpLiNR+reWFr2FFqJRsREuZdAg==} - - '@chakra-ui/switch@2.1.2': - resolution: {integrity: sha512-pgmi/CC+E1v31FcnQhsSGjJnOE2OcND4cKPyTE+0F+bmGm48Q/b5UmKD9Y+CmZsrt/7V3h8KNczowupfuBfIHA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' - react: '>=18' - - '@chakra-ui/system@2.6.2': - resolution: {integrity: sha512-EGtpoEjLrUu4W1fHD+a62XR+hzC5YfsWm+6lO0Kybcga3yYEij9beegO0jZgug27V+Rf7vns95VPVP6mFd/DEQ==} - peerDependencies: - '@emotion/react': ^11.0.0 - '@emotion/styled': ^11.0.0 - react: '>=18' - - '@chakra-ui/table@2.1.0': - resolution: {integrity: sha512-o5OrjoHCh5uCLdiUb0Oc0vq9rIAeHSIRScc2ExTC9Qg/uVZl2ygLrjToCaKfaaKl1oQexIeAcZDKvPG8tVkHyQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/tabs@3.0.0': - resolution: {integrity: sha512-6Mlclp8L9lqXmsGWF5q5gmemZXOiOYuh0SGT/7PgJVNPz3LXREXlXg2an4MBUD8W5oTkduCX+3KTMCwRrVrDYw==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/tag@3.1.1': - resolution: {integrity: sha512-Bdel79Dv86Hnge2PKOU+t8H28nm/7Y3cKd4Kfk9k3lOpUh4+nkSGe58dhRzht59lEqa4N9waCgQiBdkydjvBXQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/textarea@2.1.2': - resolution: {integrity: sha512-ip7tvklVCZUb2fOHDb23qPy/Fr2mzDOGdkrpbNi50hDCiV4hFX02jdQJdi3ydHZUyVgZVBKPOJ+lT9i7sKA2wA==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - - '@chakra-ui/theme-tools@2.1.2': - resolution: {integrity: sha512-Qdj8ajF9kxY4gLrq7gA+Azp8CtFHGO9tWMN2wfF9aQNgG9AuMhPrUzMq9AMQ0MXiYcgNq/FD3eegB43nHVmXVA==} - peerDependencies: - '@chakra-ui/styled-system': '>=2.0.0' - - '@chakra-ui/theme-utils@2.0.21': 
- resolution: {integrity: sha512-FjH5LJbT794r0+VSCXB3lT4aubI24bLLRWB+CuRKHijRvsOg717bRdUN/N1fEmEpFnRVrbewttWh/OQs0EWpWw==} - - '@chakra-ui/theme@3.3.1': - resolution: {integrity: sha512-Hft/VaT8GYnItGCBbgWd75ICrIrIFrR7lVOhV/dQnqtfGqsVDlrztbSErvMkoPKt0UgAkd9/o44jmZ6X4U2nZQ==} - peerDependencies: - '@chakra-ui/styled-system': '>=2.8.0' - - '@chakra-ui/toast@7.0.2': - resolution: {integrity: sha512-yvRP8jFKRs/YnkuE41BVTq9nB2v/KDRmje9u6dgDmE5+1bFt3bwjdf9gVbif4u5Ve7F7BGk5E093ARRVtvLvXA==} - peerDependencies: - '@chakra-ui/system': 2.6.2 - framer-motion: '>=4.0.0' - react: '>=18' - react-dom: '>=18' - - '@chakra-ui/tooltip@2.3.1': - resolution: {integrity: sha512-Rh39GBn/bL4kZpuEMPPRwYNnccRCL+w9OqamWHIB3Qboxs6h8cOyXfIdGxjo72lvhu1QI/a4KFqkM3St+WfC0A==} + '@chakra-ui/react@3.0.2': + resolution: {integrity: sha512-xfW0tGCLJSvNkMFrz4zyjvImugwVb4gYvsGOT9hISMhDvh20ciS3WEI8S+jia4ytkl5yI4H0KPyuMjQt/gM69Q==} peerDependencies: - '@chakra-ui/system': '>=2.0.0' - framer-motion: '>=4.0.0' + '@emotion/react': '>=11' react: '>=18' react-dom: '>=18' - '@chakra-ui/transition@2.1.0': - resolution: {integrity: sha512-orkT6T/Dt+/+kVwJNy7zwJ+U2xAZ3EU7M3XCs45RBvUnZDr/u9vdmaM/3D/rOpmQJWgQBwKPJleUXrYWUagEDQ==} - peerDependencies: - framer-motion: '>=4.0.0' - react: '>=18' - - '@chakra-ui/utils@2.0.15': - resolution: {integrity: sha512-El4+jL0WSaYYs+rJbuYFDbjmfCcfGDmRY95GO4xwzit6YAPZBLcR65rOEwLps+XWluZTy1xdMrusg/hW0c1aAA==} - - '@chakra-ui/visually-hidden@2.2.0': - resolution: {integrity: sha512-KmKDg01SrQ7VbTD3+cPWf/UfpF5MSwm3v7MWi0n5t8HnnadT13MF0MJCDSXbBWnzLv1ZKJ6zlyAOeARWX+DpjQ==} - peerDependencies: - '@chakra-ui/system': '>=2.0.0' - react: '>=18' - '@emotion/babel-plugin@11.12.0': resolution: {integrity: sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw==} @@ -736,8 +282,8 @@ packages: '@emotion/hash@0.9.2': resolution: {integrity: sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==} - '@emotion/is-prop-valid@1.3.0': - resolution: {integrity: sha512-SHetuSLvJDzuNbOdtPVbq6yMMMlLoW5Q94uDqJZqy50gcmAjxFkVqmzqSGEFq9gT2iMuIeKV1PXVWmvUhuZLlQ==} + '@emotion/is-prop-valid@1.3.1': + resolution: {integrity: sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw==} '@emotion/memoize@0.9.0': resolution: {integrity: sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==} @@ -754,19 +300,12 @@ packages: '@emotion/serialize@1.3.1': resolution: {integrity: sha512-dEPNKzBPU+vFPGa+z3axPRn8XVDetYORmDC0wAiej+TNcOZE70ZMJa0X7JdeoM6q/nWTMZeLpN/fTnD9o8MQBA==} + '@emotion/serialize@1.3.2': + resolution: {integrity: sha512-grVnMvVPK9yUVE6rkKfAJlYZgo0cu3l9iMC77V7DW6E1DUIrU68pSEXRmFZFOFB1QFo57TncmOcvcbMDWsL4yA==} + '@emotion/sheet@1.4.0': resolution: {integrity: sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg==} - '@emotion/styled@11.13.0': - resolution: {integrity: sha512-tkzkY7nQhW/zC4hztlwucpT8QEZ6eUzpXDRhww/Eej4tFfO0FxQYWRyg/c5CCXa4d/f174kqeXYjuQRnhzf6dA==} - peerDependencies: - '@emotion/react': ^11.0.0-rc.0 - '@types/react': '*' - react: '>=16.8.0' - peerDependenciesMeta: - '@types/react': - optional: true - '@emotion/unitless@0.10.0': resolution: {integrity: sha512-dFoMUuQA20zvtVTuxZww6OHoJYgrzfKM1t52mVySDJnMSEa08ruEvdYQbhvyu6soU+NeLVd3yKfTfT0NeV6qGg==} @@ -778,6 +317,9 @@ packages: '@emotion/utils@1.4.0': resolution: {integrity: sha512-spEnrA1b6hDR/C68lC2M7m6ALPUHZC0lIY7jAS/B/9DuuO1ZP04eov8SMv/6fwRd8pzmsn2AuJEznRREWlQrlQ==} + 
'@emotion/utils@1.4.1': + resolution: {integrity: sha512-BymCXzCG3r72VKJxaYVwOXATqXIZ85cuvg0YOUDxMGNrKc1DJRZk8MgV5wyXRyEayIMd4FuXJIUgTBXvDNW5cA==} + '@emotion/weak-memoize@0.4.0': resolution: {integrity: sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==} @@ -956,12 +498,18 @@ packages: '@floating-ui/core@1.6.7': resolution: {integrity: sha512-yDzVT/Lm101nQ5TCVeK65LtdN7Tj4Qpr9RTXJ2vPFLqtLxwOrpoxAHAJI8J3yYWUc40J0BDBheaitK5SJmno2g==} - '@floating-ui/dom@1.6.10': - resolution: {integrity: sha512-fskgCFv8J8OamCmyun8MfjB1Olfn+uZKjOKZ0vhYF3gRmEUXcGOjxWL8bBr7i4kIuPZ2KD2S3EUIOxnjC8kl2A==} + '@floating-ui/dom@1.6.11': + resolution: {integrity: sha512-qkMCxSR24v2vGkhYDo/UzxfJN3D4syqSjyuTFz6C7XcpU1pASPRieNI0Kj5VP3/503mOfYiGY891ugBX1GlABQ==} + + '@floating-ui/dom@1.6.8': + resolution: {integrity: sha512-kx62rP19VZ767Q653wsP1XZCGIirkE09E0QUGNYTM/ttbbQHqcGPdSfWFxUyyNLc/W6aoJRBajOSXhP6GXjC0Q==} '@floating-ui/utils@0.2.7': resolution: {integrity: sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA==} + '@floating-ui/utils@0.2.8': + resolution: {integrity: sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==} + '@hey-api/openapi-ts@0.52.0': resolution: {integrity: sha512-DA3Zf5ONxMK1PUkK88lAuYbXMgn5BvU5sjJdTAO2YOn6Eu/9ovilBztMzvu8pyY44PmL3n4ex4+f+XIwvgfhvw==} engines: {node: ^18.0.0 || >=20.0.0} @@ -977,6 +525,15 @@ packages: resolution: {integrity: sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew==} engines: {node: '>=18.18'} + '@internationalized/date@3.5.5': + resolution: {integrity: sha512-H+CfYvOZ0LTJeeLOqm19E3uj/4YjrmOFtBufDHPfvtI80hFAMqtrp7oCACpe4Cil5l8S0Qu/9dYfZc/5lY8WQQ==} + + '@internationalized/date@3.5.6': + resolution: {integrity: sha512-jLxQjefH9VI5P9UQuqB6qNKnvFt1Ky1TPIzHGsIlCi7sZZoMR8SdYbBGRvM0y+Jtb+ez4ieBzmiAUcpmPYpyOw==} + + '@internationalized/number@3.5.3': + resolution: {integrity: sha512-rd1wA3ebzlp0Mehj5YTuTI50AQEx80gWFyHcQu+u91/5NgdwBecO8BH6ipPfE+lmQ9d63vpB3H9SHoIUiupllw==} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -1018,6 +575,9 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} + '@pandacss/is-valid-prop@0.41.0': + resolution: {integrity: sha512-BE6h6CsJk14ugIRrsazJtN3fcg+KDFRat1Bs93YFKH6jd4DOb1yUyVvC70jKqPVvg70zEcV8acZ7VdcU5TLu+w==} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -1026,9 +586,6 @@ packages: resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@popperjs/core@2.11.8': - resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==} - '@remix-run/router@1.19.2': resolution: {integrity: sha512-baiMx18+IMuD1yyvOGaHM9QrVUPGGG0jC+z+IPHnRJWUAUvaKuWKyE8gjDj2rzv3sz9zOGoRSPgeBVHRhZnBlA==} engines: {node: '>=14.0.0'} @@ -1191,6 +748,9 @@ packages: '@swc/counter@0.1.3': resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==} + '@swc/helpers@0.5.13': + resolution: {integrity: 
sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w==} + '@swc/types@0.1.12': resolution: {integrity: sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==} @@ -1262,12 +822,6 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/lodash.mergewith@4.6.7': - resolution: {integrity: sha512-3m+lkO5CLRRYU0fhGRp7zbsGi6+BZj0uTVSwvcKU+nSlhjA9/QRNfuSGnD2mX6hQA7ZbmcCkzk5h4ZYGOtk14A==} - - '@types/lodash@4.17.7': - resolution: {integrity: sha512-8wTvZawATi/lsmNu10/j2hk1KEP0IvjubqPE3cu1Xz7xfXXt5oCq3SNUz4fMIP4XGF9Ky+Ue2tBA3hcS7LSBlA==} - '@types/node@22.5.4': resolution: {integrity: sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==} @@ -1417,14 +971,397 @@ packages: '@vitest/utils@2.1.1': resolution: {integrity: sha512-Y6Q9TsI+qJ2CC0ZKj6VBb+T8UPz593N113nnUykqwANqhgf3QkZeHFlusgKLTqrnVHbj/XDKZcDHol+dxVT+rQ==} - '@zag-js/dom-query@0.16.0': - resolution: {integrity: sha512-Oqhd6+biWyKnhKwFFuZrrf6lxBz2tX2pRQe6grUnYwO6HJ8BcbqZomy2lpOdr+3itlaUqx+Ywj5E5ZZDr/LBfQ==} + '@zag-js/accordion@0.62.1': + resolution: {integrity: sha512-1lMKuD1GbiMuemOHOu+24BSAAG8iTD6l/4zYrQRBCTsxXzHhWqTtLF7okGgmSAs8iyNfOuWefCfaJJ3BJNSh5A==} + + '@zag-js/accordion@0.74.2': + resolution: {integrity: sha512-0E6LpQgmcbDe12akh2sKYVvk+fwxVUwjVdclj8ntzlkAYy8PNTTbd9kfNB6rX9+lJUXk/Iqb5+Qgy9RjWplnNw==} + + '@zag-js/anatomy@0.62.1': + resolution: {integrity: sha512-1JiPQOyVlO1jHwLTSNJpyfy1R1UYoaVU1mKSUww5+htAuT/1txjs04pr+8vTF/L/UVzNEZZYepB1tTabyb9LYg==} + + '@zag-js/anatomy@0.74.2': + resolution: {integrity: sha512-wIJQGBiSHYB94UA7k7o4a8xbyqDwIQ0dG52xcD7+eV1ypT+dP+BtxQKmS5w06aghNdHs8b4F2hVLDVSdyLqRkQ==} + + '@zag-js/aria-hidden@0.62.1': + resolution: {integrity: sha512-vVV8bwZhNU+AOOf/USEGV/n9zuTID+spHeC9ZAj29ibWAMmaiq2bx4t1kO4v9eKqKXULUBPPrZQ7CX7oiU616A==} + + '@zag-js/aria-hidden@0.74.2': + resolution: {integrity: sha512-F4dkdLZ3Qeu6Er9rwl8IWEAdhAjcOTRKsE1Otoi0SKjcQKlQfLTPutVDVaZQxz1ZNXNlyq3YUgFE+EiwMV7jxA==} + + '@zag-js/auto-resize@0.62.1': + resolution: {integrity: sha512-nznVkAsZGS+L+VhNO8hPnEyvagNhTezkb64SSPa8E49hJHS2DEN3T5hKCx86tDuiCMd0EdjwUCCQq3pnbzbnCQ==} + + '@zag-js/auto-resize@0.74.2': + resolution: {integrity: sha512-h7gR3LCcxgURg/Xz4IoI4ccN/A+u63zNT8HoXjBWezy050YsItj20q2Yelm6ADT9qcbC+a6W83j4C43ejy5Q5w==} + + '@zag-js/avatar@0.62.1': + resolution: {integrity: sha512-J+IRqJlpL4S9ikCQle/FHj6p8uT8Ee/D88u4k7m/An4Ot1FcrfKqfC3INB5YOI+d8hkIQVtEIAC8Yt/s4OzAMg==} + + '@zag-js/avatar@0.74.2': + resolution: {integrity: sha512-Ctk/r+rbA0ZvdORRKyLMvtxxulda5m0moa5NtT2TuZe3WlTmJRYBMlSQzXwfOAXCjQlLp+zkeyyriP441sm9OQ==} + + '@zag-js/carousel@0.62.1': + resolution: {integrity: sha512-0YQ2jJjzaS1zFLVnPBslVKI8/fY2Z6aOrcJbBjxozG27iSS6zEqmbsz3OOtcYJRlB8jLboZutpMBs3PGh5zg5Q==} + + '@zag-js/carousel@0.74.2': + resolution: {integrity: sha512-EAM3hHXlC5HNcT7D2IaX7p4aOaXLaLYkSCJcMqqo9H3CxVWa7Kfw00yTG1Z0HcRNKiPYTBF/W9fqtHwfpa5s+w==} + + '@zag-js/checkbox@0.62.1': + resolution: {integrity: sha512-xiubQLhRXedlZe4Vc6zxaDFWLSpRdGEG0jTrF3OXovYZLN7bmq0iXiYcWqsLa012+2dYN9w5B1zfQQlzf4sk2w==} + + '@zag-js/checkbox@0.74.2': + resolution: {integrity: sha512-bmF1I179fhEvEXBJU5yzHi+W2Oett0PUVUw2GFFzfIn6w0AgZKNIr8+NLtEWR0Wn0UELXNhLnBSqZC3j2jZHlA==} + + '@zag-js/clipboard@0.62.1': + resolution: {integrity: sha512-gEhCGLkAlrgNWkd7ZqF4p4yNKsR54+0YQPevEv7iX9oio8T/F8OWaDmDjA4NsXxqRe6hr5KLJbVp8dYRop30TQ==} + + 
'@zag-js/clipboard@0.74.2': + resolution: {integrity: sha512-0D7sIBilV+KTMwArmfa73uJUtDbAqHH3g5o3RFJ+RYpAIYQTINGLw13tsmUK7xQ8gd6M8ARAcAv1khZdwO4ykA==} + + '@zag-js/collapsible@0.62.1': + resolution: {integrity: sha512-M4hsuqf6dVra6RvKaxQjgQjZ+iYj3XH84w6QOnt/SXbJauQoE6nfy77RI/A8O2pPuP6uLq0h2E9Eo3ftcbGBoQ==} + + '@zag-js/collapsible@0.74.2': + resolution: {integrity: sha512-wZI57xWU2tlcJDPlQBUyBxg39PUkY12H6MKcf/+1KBOpJiWc87+4HDBCVSt31diUAhJwcbNsYFwJg3BA4vTw7Q==} + + '@zag-js/collection@0.62.1': + resolution: {integrity: sha512-Qg3OvGCvcoeV4u8IcQmNCu4dChRttVyQ9DF8Ab0qlyrjRDF+w8vMAcNcgNqn10/xX4A7B743cz023LooVsW6VA==} + + '@zag-js/collection@0.74.2': + resolution: {integrity: sha512-8Ls5TR1kKPLDucuJJ0kuwJ45jOEzhcVN4T/mbkShUrgRSB4FrcNzwdpMrKqidNz8OrtphgYYcn3xx/gGUIHLRQ==} + + '@zag-js/color-picker@0.62.1': + resolution: {integrity: sha512-GLeADGcoMLcVS+UM6rn/c1BmBgSB2uTc5AWBkuKoH7TktsKo6+T/v3/QZIU7/b69qBAp3/vWZti99Flw42IDdw==} + + '@zag-js/color-picker@0.74.2': + resolution: {integrity: sha512-l5YAxXjSNWT++gCBLkP/qKwdHpNSxZYGdLU5f1fNupFSSZgZlNTz99ArxsypqFrJKTVvPjDSoOhjG4BeKHAAdg==} + + '@zag-js/color-utils@0.62.1': + resolution: {integrity: sha512-uXsEA0xsI4NT7YFwWZldy7LXsk32Ta+41MrckhzbSA766v+bW4sFDUYmJxwLkN4nl1QzlLAlGghhauXmW9Fs8g==} + + '@zag-js/color-utils@0.74.2': + resolution: {integrity: sha512-yoavZXIGuVXi/QCO89hBu9Ni0xH7o8ZcDFX/CIjr3S9sI3fjNak9efkfPFx0Ooo9x0Fvz8mpu+DPB2uk7voXDA==} + + '@zag-js/combobox@0.62.1': + resolution: {integrity: sha512-EovqyFqD61YmYJYc42qKH2OE7GxMm3gamWLU/lvZe/3eyZt6TsxFe2xeP7WSsvq2v90myMajAnUb0DOpvYaCKw==} + + '@zag-js/combobox@0.74.2': + resolution: {integrity: sha512-NqG2R01NjZz5a3hGYI0Ok7MNM7OkMlYlTI5fJXDgkkhgXi5Mk87R4+AQNjP7xzSEI35rSlVIJt4ecW59cklHng==} + + '@zag-js/core@0.62.1': + resolution: {integrity: sha512-ZSjqnV5vcGDassjmZ/lxWbG244A0i+IHImVZ/a4/0JkjkH126ly+At4FC+HI571pNKiNlrqYmGzRRSBMqm37yQ==} + + '@zag-js/core@0.74.2': + resolution: {integrity: sha512-UAnj9WJdFYeTxbwSCdX8zRYhtLvlJqfqy9cn2NEugpxf+9W/GA89JzH1ZdKLyVJUIuIPoqGd4ZaAgv2p64DZ1Q==} + + '@zag-js/date-picker@0.62.1': + resolution: {integrity: sha512-Wl6yzMtrTy7XgDFbYJaRO8M5dkxLPBvAo3ilDvFBicbJViJCZ9pg1AJYh+xGaK/gfAd7O9wBdYJdHxfESlmlDg==} + + '@zag-js/date-picker@0.74.2': + resolution: {integrity: sha512-iIVQLEAV1FKM+aB7v7Xk5o4w95vdmJhe3aTYDxc6Xo+Fw4tsdUmg/UtQ1ScPzbagBo8Cut2krvkG/8SrlCRMyw==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/date-utils@0.62.1': + resolution: {integrity: sha512-YBqT5YRtHOCDS2IcCZtrq7BfzBkU5c+Sc2pVTncf06/3jxjE6l6YbBncMPu5a3uWKjNld1wOTFszhSoPKZfrJA==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/date-utils@0.74.2': + resolution: {integrity: sha512-Pb7FggFOTzCTyo+ap4uuA6vBFrYDSoqW/sMI6kwuOBWL3IjFKfmmSVaem6MYycycWN8d7on4IOp0muWCcJaDUQ==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/dialog@0.62.1': + resolution: {integrity: sha512-7YRvWZ9UMUjFz0q537/uaTMBljLimWISfVHkUSa2ngbXB8LPYYbqYv5Vio2rvRFqy3nJR3HTO4cGZJGDjO655g==} + + '@zag-js/dialog@0.74.2': + resolution: {integrity: sha512-2kYqFSqpa1SShS2Z8cVqtoOML3uiwIUOZxNVKIea3ItdlMTqHJI+X/NudFvgpUvVj4NMLbUhwIgC+jN2SWDL7g==} + + '@zag-js/dismissable@0.62.1': + resolution: {integrity: sha512-muGTBISpjQEWLCrsYa9wAFaGXlVxYtyMaDgpcPpQdQPwZF86b445y4d8h9FjwkESdJ6Zcdjn21pu5CWD28T3uQ==} + + '@zag-js/dismissable@0.74.2': + resolution: {integrity: sha512-Wl6n1lW1eTKKu5Kg+6jum9nZDXYGA86XL24Rip2aOScrAo2UGCA+nSIgg7GGO5qGs52iawITba38tAe6maZQLw==} + + '@zag-js/dom-event@0.62.1': + resolution: {integrity: 
sha512-/+okVW69Xdoot7dutJVMz0iciwWM6DvAeLWr7LB5DZsUQMu93oqV/8BE2JArDxEcg5C208HNThGStcWlTaddgA==} + + '@zag-js/dom-event@0.74.2': + resolution: {integrity: sha512-duuwxowflkY7UUI+1vHr9ttzqn2JtJ+mgAS0cBeNmqtvK6XrNaHSrm0B4u4R/KIJ73Tx8TZGbmgN9Vwj/U2FrA==} + + '@zag-js/dom-query@0.62.1': + resolution: {integrity: sha512-sI/urNd3QX/WI7Sii+X1Z/OTWNisn7EaW3T0X9Rbn41u79DC4KeUnP+wpIq1igSJNH2zQWIWBLJ1OGhAjuSl5g==} + + '@zag-js/dom-query@0.74.2': + resolution: {integrity: sha512-g+7wIdhT1nlY5yhQWoMDzin8uYoBXeQk8TNUj3GZH/1dTX9YC0vs7vz7rotzunLkEA7nEGfLNzdO5CRQBhGp0w==} + + '@zag-js/editable@0.62.1': + resolution: {integrity: sha512-BkPLV8T9ixdhz3IxvseV24a1pBNmYhR1np+JUKap0C8thtFbDoF361haEQjCqTCfHDv+j5l1rtq/+H/TF3eEIg==} + + '@zag-js/editable@0.74.2': + resolution: {integrity: sha512-SXSaAmaqvhFPJmX25tz487skF+GTIGAmp0Y6iHqDUsUh0qDncUPOr90M+kFtud/6YUPFkzEpKQtmeczJR0LURw==} + + '@zag-js/element-rect@0.62.1': + resolution: {integrity: sha512-SefRp1IeiENoUkl7yxGzUIdxtQqgKlI+G1qlgx9MZgchH2VZCpqi+EuZgLEKzz7REMabOYqbgs6EEIxGIyNueg==} + + '@zag-js/element-rect@0.74.2': + resolution: {integrity: sha512-FK+bQ4nhdcR52868uE0rlmmq4+un5P++WDoe6S4Aldrx7FJS2XTtWMBdx09zBe48DvUYjGM4o3RbuxbCQoD0BQ==} + + '@zag-js/element-size@0.62.1': + resolution: {integrity: sha512-QCtVeIJ611hJPorKEkdfrWWcMohadplZoW8xQW/2PLSmKUhTNLfHsZLyeoYKyj5Jk4X8OAN4onnMVETFw232EA==} + + '@zag-js/element-size@0.74.2': + resolution: {integrity: sha512-mqw0PLdPs17zrolZBlsYby5kUfo8+QpaU/HAVQavnHQZwNiX4CRBvG1YeqSP699Mvh6QDKc0JhchwAfh+eGGnA==} + + '@zag-js/file-upload@0.62.1': + resolution: {integrity: sha512-Wh33acYMJLNRIV2y0GdSZqoN3aX/t/uzIBWh3rVsN7tpjDYWXLYIsXQttkGLFf0sgICK+3PVD+LLaIpiGDh4+Q==} + + '@zag-js/file-upload@0.74.2': + resolution: {integrity: sha512-VVko5ojAoRQsalsaQxGwTt5ONcYaHuw6yFL/HqPKNzGyyG8zPBl0nRGMSZQprTyUSMDtsxNp8ydL1cbauDqU3Q==} + + '@zag-js/file-utils@0.62.1': + resolution: {integrity: sha512-p363S2pqz29wf1shcSfoY2GI9wWrJkKamNiwuehqoYFh2b8isrcWFVL3VYxm937N1/m5+rtMATQbn0a9j9sggA==} + + '@zag-js/file-utils@0.74.2': + resolution: {integrity: sha512-pTyU33Ag7533X3/RCBvfbGBHFMncg5x2/3n1htSxEVLIIHxbRvvDab2IN550n3OIjMzBLfM4xjNu1R87ed2hlQ==} + + '@zag-js/focus-visible@0.74.2': + resolution: {integrity: sha512-CO5x3uCGKgigQ91S3c3vy/KEKyXK+eTveIzprFTxlQs4Zu2qMe/nJCIhIkG54fhvW/a5F9wY7Ox1f8hGZ1Z1fA==} + + '@zag-js/form-utils@0.62.1': + resolution: {integrity: sha512-GJWRRtEpro8TNEUuEWMhIOWmVFXqiHNTTrrRLxijxUIWbsPrPdPiKL7qwBAESYoZQCmN0hU99S0w2Xmm7Q05Zg==} + + '@zag-js/form-utils@0.74.2': + resolution: {integrity: sha512-LPaZfDhQmhyL4fMMKm4gZGUg6vwcSzaHOVlQHdAR8yoSqvO4yXEdxr2xz4civNNknD0crST2erfexGHEY6Oa/Q==} + + '@zag-js/highlight-word@0.74.2': + resolution: {integrity: sha512-zuy2E62F/w3G94kh4l3iEAtNxqwuAdNkMwcS8EhT6mnzNcf4BgzW7Ne+O1fRb4IGazZwHPaWC0rYdTUuErK9IA==} - '@zag-js/element-size@0.10.5': - resolution: {integrity: sha512-uQre5IidULANvVkNOBQ1tfgwTQcGl4hliPSe69Fct1VfYb2Fd0jdAcGzqQgPhfrXFpR62MxLPB7erxJ/ngtL8w==} + '@zag-js/hover-card@0.62.1': + resolution: {integrity: sha512-ryiNHQmmHpiDiZ5nuk9nvGUgnT017q8hYf+wLSI5OJ+klHPjrHObb7I7v/fUmKzWNtIOhaL0uw9afzjRt3bLEw==} - '@zag-js/focus-visible@0.16.0': - resolution: {integrity: sha512-a7U/HSopvQbrDU4GLerpqiMcHKEkQkNPeDZJWz38cw/6Upunh41GjHetq5TB84hxyCaDzJ6q2nEdNoBQfC0FKA==} + '@zag-js/hover-card@0.74.2': + resolution: {integrity: sha512-Li1lrePYcTC2UqAP/oOLcK4syyBomTmAp06CJukSXpGcYe6qUD4CqhLva0qYjvZ6SKaB9Y3BryD9RTjFvZOuZA==} + + '@zag-js/i18n-utils@0.62.1': + resolution: {integrity: sha512-ipzx0W6VK5x+w/PnUrN8z5SULJuLqvdzsPVBJ2iGHrMcTPC/y9JDt82nJV9fUYmG898pOZUx7vysfLLPNEAFTQ==} + + 
'@zag-js/i18n-utils@0.74.2': + resolution: {integrity: sha512-S+ZJ3OFUtCoUdKkDfiF3sgXr+98rhVV+BmHgfAiEYRQA7RjeHDB3jX+eBbeZTLFqvHGtaGWCRE3knto3Ed7YvQ==} + + '@zag-js/interact-outside@0.62.1': + resolution: {integrity: sha512-V5N+kr2Uv97HWYL0U5ZVS//NMQu87XGLtI7Ae5EtHrdAEKxO2NpPwf50Gzza4zc1VEVYYFqobTlkNQ3hrrL6VQ==} + + '@zag-js/interact-outside@0.74.2': + resolution: {integrity: sha512-58ilkSC2UQw9PsFo4HKBrYcWC1+WRA8M6MqNf9MnxxOvAq+Y8APH0I6ExxAfUhLsk9v+6kPf0txM9MDoB0iNFA==} + + '@zag-js/live-region@0.62.1': + resolution: {integrity: sha512-Giu7d5UWc2Sqb3/T0tSzqSwxJ4mVrNN+MTu06J7EaD4khK5RgX4GRpQ9rpwOS/GJT+8nc6YBhWTi7tqKN/+iHQ==} + + '@zag-js/live-region@0.74.2': + resolution: {integrity: sha512-l9cipG1hykvSWIbKc3/3imFQ+Sp3u2VjZirmdM2K9julo7DKxU3r63aQI2s6SpD4tfQPq4AcoXqzZBF0RnQRUg==} + + '@zag-js/menu@0.62.1': + resolution: {integrity: sha512-l/PartHj6//NMlENYNBmUmeYG9K0SbjbnnIudv+rK+oyrUoX/MDCJ7bdy7ZMYxWTR127WdZlLHBxsgMe86lBqQ==} + + '@zag-js/menu@0.74.2': + resolution: {integrity: sha512-dEBerxdgPH4dGWEbFM/aY8zYtUAQL7hopv4iWPHv1NMFhWu7IShmKe2xm/4ZSKaOkiWZ2CJyMI0oHr+qQ+ZhWg==} + + '@zag-js/number-input@0.62.1': + resolution: {integrity: sha512-THizFB4Qwq4erMk6mI82voIo/PbbrAOSQXyPF8NPyGupSzqYntS1XPEdyqFH677PhHweelxQnvtZEm5alm1HLw==} + + '@zag-js/number-input@0.74.2': + resolution: {integrity: sha512-I889jfoTiKX9gRbyZCgKDyBj1+VgFCKyELbFVJhgnWQhip4MLjzI0YjWWp/wgHs/ZwEBaxvy6CRq1KOtvGkQNg==} + + '@zag-js/number-utils@0.62.1': + resolution: {integrity: sha512-ktnGSYKKLG9No14ivlboEzq4+jiOIWU+8yeoRrZmfdCG58g4s9JF0lBDRf3ts9vhUdofJ+vUFMPqkk2eCWyQlA==} + + '@zag-js/number-utils@0.74.2': + resolution: {integrity: sha512-wNaixDQDotwUUKtpA524tfDvsiQQroDFZYFPZfnwKq89rPT0Zlh8LMKLL/Mfi32Zqp8UP9srdcMEy1XEGrIiNA==} + + '@zag-js/numeric-range@0.62.1': + resolution: {integrity: sha512-R4/II5MvS+eJ880srPuIlexqRH7kVsGomcsDlB5yyhHsradm7OJfC5L6osvKj1DNAitfFh8901BZFaWmQe8O1w==} + + '@zag-js/numeric-range@0.74.2': + resolution: {integrity: sha512-sm2xlc03Zy4DdCRNmr7jUgL9s34rK0bVDezn35TCq3QMPWQndIlsCbywcmxqxxtUymwnSwizWenZaWVlspFlgg==} + + '@zag-js/pagination@0.62.1': + resolution: {integrity: sha512-fyDXNnAGyRsQEugvNR1kfEO8hGeesOV6l2rEACdvNN6G9Cqktqd52aaWVIf805G3Ig72igW2SybI9md/rDflzQ==} + + '@zag-js/pagination@0.74.2': + resolution: {integrity: sha512-aqwqxHrgvfm6rAcxIRTSRFaWA9/UCQGiLP+P01Hg+/+IkVJeJIdxN10ImSOL4Sl9zvsu3jVCXg6xLh4ydnaWqw==} + + '@zag-js/pin-input@0.62.1': + resolution: {integrity: sha512-CTAOyQCLaNSWH29bhc4XruEkvnYFJN1QF/x5axtHV+cir05zcdB3L7Sna4D6nUBSwd0tOGnUmPlviyP7zkpgBA==} + + '@zag-js/pin-input@0.74.2': + resolution: {integrity: sha512-1LxK/VSU8t9w9/c726gZNHXnawU4SnN0DxsYQa4YxN1RDwFp/RwKp0hLAsunKx7yBdyC1VVyqVPYZdZYQotdQg==} + + '@zag-js/popover@0.62.1': + resolution: {integrity: sha512-cT6okb5Yq69YWx6G1vonNEnEg4MlBXRbXLflLBqOP1PTwhk6RwlndXGV2uCdlnR0mUJa/RKldzdUcwOQesJaag==} + + '@zag-js/popover@0.74.2': + resolution: {integrity: sha512-B0U2/XJ630kWYY1x5UTBMxXjy2EfbH9T3eRQlSXZS/uToEzvhsYvw/YaIRqyvYxPyrnmB72to6r5Kw8omryw4Q==} + + '@zag-js/popper@0.62.1': + resolution: {integrity: sha512-tyLEdYIsv3cgnWCWzPPv9f72hzmQDQcObDIczIZt+OQr89qgyhGHt5jR1f0Qxsz9zZlSPsEftccyXRQYInQtxQ==} + + '@zag-js/popper@0.74.2': + resolution: {integrity: sha512-gsS32rxw+bSKOLOtF/VPNNafzO/fEU58OYIfM7yA4swrEupUqdfAF/ihNH+Uj/AZQKj2tnwLTR1fJ1w3czpY9w==} + + '@zag-js/presence@0.62.1': + resolution: {integrity: sha512-qjnr1WpW5yetRp2j2V0ocRvr6X6TuWNxjL2DyJAusodcsSElF2V0UuFOLT/xIZA8BVIbgcyCvcPB01PHugC5Ww==} + + '@zag-js/presence@0.74.2': + resolution: {integrity: 
sha512-57eBd5C205jYUQ7Rsbft9YRy4euNDdxKDpdLdInqk8egf2vFaUWIV152pm5iOGRVidDGgcIunTFvHFCT1rbATQ==} + + '@zag-js/progress@0.62.1': + resolution: {integrity: sha512-7FyeP/wCiJ2dao1y/4RzhrLeIse305YtRMTDaVE5EnOJK3nit2Rrl+z8kGx5aqrGQcGsLH/rh5QYFp689Nx57Q==} + + '@zag-js/progress@0.74.2': + resolution: {integrity: sha512-4LNhFP18g21ni4Hv0RQEYqcMhiyMzTsu1IPizy8I5l3mJvsV6b7w591K2iC4mybLynPBodvkl+fLKqKm3Z+1IA==} + + '@zag-js/qr-code@0.62.1': + resolution: {integrity: sha512-648qXQduIqq4CZWN07D1UOcczZrdp3UjBSHFEi4PQHTz1Vg08pH0BIZDqiqpupG9niYJEB/GPLGofRQQYoIoDw==} + + '@zag-js/qr-code@0.74.2': + resolution: {integrity: sha512-GHAmnHz9pdaP0c5/n4aPSo67Bk1Cvv9PIHuOKKLyalRcao/ARvtiS6371logfB1l9DHtMSARWaZXDrIdNZx/gQ==} + + '@zag-js/radio-group@0.62.1': + resolution: {integrity: sha512-VVGTUkHgD27vBTYeP7hPYi+eDRXkq7xtlv6Ml062t3gcTWBhc/2eaI6iZ7awlxTl9052sflzbawrrDysPREuAQ==} + + '@zag-js/radio-group@0.74.2': + resolution: {integrity: sha512-Ntbi21CTqXIVMrGccVSefwCapACSTOy4XFDM9/piTLeRlfmNxsy7j9hl7EFBpovbe4WYLHjNQaL3MM+hXEjRRQ==} + + '@zag-js/rating-group@0.62.1': + resolution: {integrity: sha512-gXvHofr3gfZcaMh7Y3FU+wyj7ge1R0BgsuPJWFUShlAlxjnnE7e3AqjSGlzuvpkWMkc6KKDyKRJlMVWLCv94OA==} + + '@zag-js/rating-group@0.74.2': + resolution: {integrity: sha512-Yg30ph9YMy5g6TPHU9MD/NEheaz8qOsVVkefG2EvRVfO2ZGXJclqZHg8/TW8nzUGp3Mt/fAq0A7jfeKvJBcNng==} + + '@zag-js/react@0.74.2': + resolution: {integrity: sha512-Vli4cigN032dQM69Dr/2rR5FSSN+rzYJUiqAo3oNyvvWSSsgLvoNYcfltxY0Tah/PIftbF1NBUNYaUtv3OJgzA==} + peerDependencies: + react: '>=18.0.0' + react-dom: '>=18.0.0' + + '@zag-js/rect-utils@0.62.1': + resolution: {integrity: sha512-6w56LuRD382Oa2FXi4AfKQqgtUPS/nc/mZzXiaqKz9b5aFA1CXtmEwNC2GaiXhkqJp5DyxHwujDfQP1WXACnRQ==} + + '@zag-js/rect-utils@0.74.2': + resolution: {integrity: sha512-bG48u1NsWBRx/fTQfDFskOkwKzpROVhuzJQi/OtnugHQj1nFfZ5CMpEX3BPv5PGwvOia2ZDm84+e12WucFhr0g==} + + '@zag-js/remove-scroll@0.62.1': + resolution: {integrity: sha512-7xpX6HUrOEq/TNLIWojYnQf7kj20bk8ueOKpu7cTZmoN0LSL6cS09uil+NOqb+SzZsiRmQKvzd3fQBNwbdab5Q==} + + '@zag-js/remove-scroll@0.74.2': + resolution: {integrity: sha512-aEuspeZ98eAEGlAfnCh0syzbmFnMuov9yJc8Ud//pbXd+96J7X1xa4TilHZ+ppm8IZZLGc91axXHwkbIINC1Qw==} + + '@zag-js/select@0.62.1': + resolution: {integrity: sha512-dgU65imBSeB8+QfHkN68j7Xqd/d6wsF42itJ0AeRSdgnCHgTWdN9rRCK5EDbNkJue51oMkdsnJ7XG1k+oCgiAg==} + + '@zag-js/select@0.74.2': + resolution: {integrity: sha512-Xl3CV1ONpQG3Ah0sriiuCFHXuiuzYz9KQkmEapcE+O5kzLADheIvgFRQYiMdLCltEfSBSBJvPrRM08aGyQgSpw==} + + '@zag-js/signature-pad@0.62.1': + resolution: {integrity: sha512-hWZSWT9J9V1kbImkj8qXHCqS0TYm7nms9oAhcQ2QNIiGO38wqW8Yswos8sqAj8VtzHxkSMIeL1by7Zgy3Xjq9g==} + + '@zag-js/signature-pad@0.74.2': + resolution: {integrity: sha512-Ba5v3J/fQWVR81wvhHAfwbCjqJlv/15If0SCbKiTA879I5s9dZVYAkvibeGlIEK46SEKbeUpAewE/+eGgrrE6A==} + + '@zag-js/slider@0.62.1': + resolution: {integrity: sha512-v5rgPJF3fh7bBPu0wzEGpN4EcXpK5cSw4OAwxatmbtkYsg2Udwv6WL26CB5Q2zVwYIR6R532b/bjFqicfVs+SA==} + + '@zag-js/slider@0.74.2': + resolution: {integrity: sha512-aDuzKySgFOm/D0opDyQOo2KMWFN9ZHuF50rG5cfIgl9dzo447iJfLzObcpAwNpQJq5P7/q+23K+sh+/NRGoUbw==} + + '@zag-js/splitter@0.62.1': + resolution: {integrity: sha512-Ni93ZaprnbctAsbuot8sEw9DDfNMgkelnd5xQfAiwpgjwUgnY8733LRbWydC5OUPoJ/cCs3XiNKa0CHwclcq6Q==} + + '@zag-js/splitter@0.74.2': + resolution: {integrity: sha512-2PHBD4Y8h4ZcVphMWSDP6EsSamLYburQDDgJTSN5npcWsLu4iZ45T1U8bV0uICs+UJP9fko4YvjABrPv42asLQ==} + + '@zag-js/steps@0.74.2': + resolution: {integrity: 
sha512-UTf5SIvfIfuAuGxRZLi5zDN3LGWQp2jdycS9JnKwL+KJKeHPmTu4OtXbRc2c4lEO0z1T9sVOHB0GPwBGK4+c3Q==} + + '@zag-js/store@0.62.1': + resolution: {integrity: sha512-0xkz7b/Rs9cHeI5CB3UH4yMlVzys3l+IsJU3KRWZwqWohDjTEqRyzcuFD6AH28WAcJPjIgOQYnRYzYSoMGZtDQ==} + + '@zag-js/store@0.74.2': + resolution: {integrity: sha512-LXCSyIVf6G4SvoPojNxOUiK45Lg1Qo/I8NCIoWAgSh6WgthuyEP05oLlW0OdvfHWLsnSwUjJmfuoRQAQNS3M8Q==} + + '@zag-js/switch@0.62.1': + resolution: {integrity: sha512-uh0yy3NuZqHF+jPVZ2oMcAtPx32eTnBebiROBGBDgj1A5yZBirfQm8j/vZLSILhDq9TdktHS9/gITJ7TvgV4cQ==} + + '@zag-js/switch@0.74.2': + resolution: {integrity: sha512-I5OTZ26Rp3ADySnzpIlI47+BiUAhSIXLMpQQ/T2XCpF9hsrxmAwmpyyJ+48L7aLHtzeEusBOATU+CWMEgycRPQ==} + + '@zag-js/tabs@0.62.1': + resolution: {integrity: sha512-BpY6oA2nmZLpYu8nQrpi+zTF4txTiMYIMB31CmbFmbJ3hMVkEqk8sgNzNQY3LrzkkSemDRBHxPZ5H+YKaQrEdg==} + + '@zag-js/tabs@0.74.2': + resolution: {integrity: sha512-e/xkk4aihOikJsw47Q8nwAm/vUlfgXkkGxydquZg4Z42EDNYt5XcxPpIW+7gbzK+ergDYN7JyHQ4+sIRRPA/jg==} + + '@zag-js/tags-input@0.62.1': + resolution: {integrity: sha512-8gJ4ckQQ0BB3oUGgIEGkmB6wIKSf7xx0q6e3tqTbfZnPhmWP4hpli38XAOYjsBQyNXmQW89H/Rp8/8W1A/Vpow==} + + '@zag-js/tags-input@0.74.2': + resolution: {integrity: sha512-bE1Z2dANz7f734AuX2b4RtGk6/L8Nf44Q1wxCZ++3CLYV6EBBBDb2kNWr/Pz23md58ve+AcFcd/pzKtoXfWcdw==} + + '@zag-js/text-selection@0.62.1': + resolution: {integrity: sha512-0b049CnWN/Nyp/F/nbeU6G8BI/fzwlSQTTDWK81yRFADDFTZ2mWpVAWJF/fY0rKjsn4ucDykCS7GXMIo5rYILQ==} + + '@zag-js/text-selection@0.74.2': + resolution: {integrity: sha512-c7fLss1VyFnQzs5f5HNMD3qyJczH+SNzOgqMwU6apKpgYRLMMI0S4F03I/cyDQrCPZMcy6F/bl6zlQmdqb6WLQ==} + + '@zag-js/time-picker@0.62.1': + resolution: {integrity: sha512-THNASHp9Fu5f4/LC3t3qJfsYD6FqjhbP7HrjIDDFOcdNGRzOTfbEpKF3JtJgmM6F+/fuQKhe6FUbcluMd9zo8Q==} + + '@zag-js/time-picker@0.74.2': + resolution: {integrity: sha512-VSsIQ+RUKo1hC+ip2Hq2jsiBTZNV+cuRntGXvVFegI1VG8W2ug6CtW5ilfxcQte8dfn1s6g2F4TY0g79F7svgA==} + peerDependencies: + '@internationalized/date': '>=3.0.0' + + '@zag-js/timer@0.74.2': + resolution: {integrity: sha512-T8JGTNd9tJXUZqerBSDVsv1upD0vpccZqXZthpl8u4KFXj8vR/CZ9fW00linc+tR9XdxvBbkxk3EW/BPbNvoDw==} + + '@zag-js/toast@0.62.1': + resolution: {integrity: sha512-Kb+OiFx7KUG0fAExIL06xWEfhxeMRJACvP6q4B4FNuFX+6N06RbV/PZtLbPbffOodd7VhSk1W37T7t6Np32mvg==} + + '@zag-js/toast@0.74.2': + resolution: {integrity: sha512-Xt4F1BxP0U15WoNx73gIpnFRkCB3/dRkA5zQPECzR/U2drN2JAoCdb9wNQjxmR/6DWkT4PuCeWliUcskHDY8Wg==} + + '@zag-js/toggle-group@0.62.1': + resolution: {integrity: sha512-h7jQtWJt11uws6IYBd3kQzOyOemtZ5CqR7lt4XZdni3J1EtymKRJNha2JIukIETZS9/0VU1fPcuDkQeCXcGHgQ==} + + '@zag-js/toggle-group@0.74.2': + resolution: {integrity: sha512-4wAsl33rP/LQq052sE+UYn0tfiJtZeINishI+0xrWc7iQY61F6l6UtZUR4I/NT3sa8XV2xSgunVPP4dVN0JlpA==} + + '@zag-js/tooltip@0.62.1': + resolution: {integrity: sha512-318EJU6B4FR0nMNU79qMAgdOiVM6vbDiRWBHjGLDBK3z5No3lKfo4TZb/NqBmmi2W7ZFPiPwvLFsTql+H0xDbA==} + + '@zag-js/tooltip@0.74.2': + resolution: {integrity: sha512-lHs7dp1wUK4B+iY227ZfsQelVcRrad7ZVAh27ZzRdCkUE7KMi8ev45YudbnSM3ltCMGe6F+it7jWcalccFad4w==} + + '@zag-js/tree-view@0.62.1': + resolution: {integrity: sha512-Y7qj16X18uElsD5jA9l03+rKEg1/5JIGRutO+NlEbs9Ffb7y34vqcEWquA+YgDfqXVWk2b5v9xcU1iKuKhOagQ==} + + '@zag-js/tree-view@0.74.2': + resolution: {integrity: sha512-oLizz/iU5xj3KCIy/oADAIhs2NjLl3LawYYktD8k02JF2XRPi8bDnRF96E0YPeLOnf5XdEEXmDD2A1l+gWrReg==} + + '@zag-js/types@0.62.1': + resolution: {integrity: sha512-wjJvasoxg/rsFhMTaGLJEjYnSGaXz7DymtO+wWOIfa+O6y44flHc8wRQ1l6ZRRetCz4RALTuwhZI+0ESZ1Bpwg==} + + 
'@zag-js/types@0.74.2': + resolution: {integrity: sha512-UYdHh5Jj2LZZwP8Amm9YEoj9f/zYNWuuw+HRGCLZew6moHvKj/HHKJHLooPPjGztFIeRsnicE1mJ9E2bDllBaw==} + + '@zag-js/utils@0.62.1': + resolution: {integrity: sha512-90sk7Li2mqoMCAfZbns1xrySEg4PIFPwLpiRO/T2kvKpc9z/qsq2WqDFpS8eqHfYRmkLnmQa0Bw1LzItYYsGVQ==} + + '@zag-js/utils@0.74.2': + resolution: {integrity: sha512-WtIsNyDvnslCjtIIP/bRzx3bJMaT0cIgI3f+TgiFWhtQMlUZMpBkwkKVfvUwI5qcZ+ZOMeoonAWFqFECCb3h3g==} acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} @@ -1598,17 +1535,11 @@ packages: resolution: {integrity: sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==} engines: {node: '>=12'} - chakra-react-select@4.9.2: - resolution: {integrity: sha512-uhvKAJ1I2lbIwdn+wx0YvxX5rtQVI0gXL0apx0CXm3blIxk7qf6YuCh2TnGuGKst8gj8jUFZyhYZiGlcvgbBRQ==} + chakra-react-select@6.0.0-next.2: + resolution: {integrity: sha512-QSylYpdvz1WHBXZ6Rxd47FnvWQlexAx+c0KtyW5PAVBrafXJhqhprHnuNsbqHiEI3I8zJHiaMGWzvGVD7v8LQQ==} peerDependencies: - '@chakra-ui/form-control': ^2.0.0 - '@chakra-ui/icon': ^3.0.0 - '@chakra-ui/layout': ^2.0.0 - '@chakra-ui/media-query': ^3.0.0 - '@chakra-ui/menu': ^2.0.0 - '@chakra-ui/spinner': ^2.0.0 - '@chakra-ui/system': ^2.0.0 - '@emotion/react': ^11.8.1 + '@chakra-ui/react': 3.x + next-themes: ^0.3.0 react: ^18.0.0 react-dom: ^18.0.0 @@ -1663,9 +1594,6 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - color2k@2.0.3: - resolution: {integrity: sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==} - combined-stream@1.0.8: resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} engines: {node: '>= 0.8'} @@ -1674,9 +1602,6 @@ packages: resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} engines: {node: '>=18'} - compute-scroll-into-view@3.0.3: - resolution: {integrity: sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A==} - concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} @@ -1690,9 +1615,6 @@ packages: convert-source-map@1.9.0: resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} - copy-to-clipboard@3.3.3: - resolution: {integrity: sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==} - core-js-compat@3.38.1: resolution: {integrity: sha512-JRH6gfXxGmrzF3tZ57lFx97YARxCXPaMzPo6jELZhv88pBH5VXpQ+y0znKGlFnzuaihqhLbefxSJxWJMPtfDzw==} @@ -1704,9 +1626,6 @@ packages: resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} engines: {node: '>= 8'} - css-box-model@1.2.1: - resolution: {integrity: sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw==} - css.escape@1.5.1: resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==} @@ -1782,9 +1701,6 @@ packages: destr@2.0.3: resolution: {integrity: sha512-2N3BOUU4gYMpTP24s5rF5iP7BDr7uNTCs4ozw3kf/eKfvWSIu93GEBi5m427YoyJoeOzQ5smuu4nNAPGb8idSQ==} - detect-node-es@1.1.0: - resolution: {integrity: 
sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} - dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -2038,9 +1954,11 @@ packages: flatted@3.3.1: resolution: {integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==} - focus-lock@1.3.5: - resolution: {integrity: sha512-QFaHbhv9WPUeLYBDe/PAuLKJ4Dd9OPvKs9xZBr3yLXnUrDNaVXKu2baDBXe3naPY30hgHYSsf2JW4jzas2mDEQ==} - engines: {node: '>=10'} + focus-trap@7.5.4: + resolution: {integrity: sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==} + + focus-trap@7.6.0: + resolution: {integrity: sha512-1td0l3pMkWJLFipobUcGaf+5DTY4PLDDrcqoSaKP8ediO/CoWCCYk/fT/Y2A4e6TNB+Sh6clRJCjOPPnKoNHnQ==} follow-redirects@1.15.9: resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} @@ -2062,23 +1980,6 @@ packages: resolution: {integrity: sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==} engines: {node: '>= 6'} - framer-motion@11.3.29: - resolution: {integrity: sha512-uyDuUOeOElJEA3kbkbyoTNEf75Jih1EUg0ouLKYMlGDdt/LaJPmO+FyOGAGxM2HwKhHcAoKFNveR5A8peb7yhw==} - peerDependencies: - '@emotion/is-prop-valid': '*' - react: ^18.0.0 - react-dom: ^18.0.0 - peerDependenciesMeta: - '@emotion/is-prop-valid': - optional: true - react: - optional: true - react-dom: - optional: true - - framesync@6.1.2: - resolution: {integrity: sha512-jBTqhX6KaQVDyus8muwZbBeGGP0XgujBRbQ7gM7BRdS3CadCZIHiawyzYLnafYcvZIh5j8WE7cxZKFn7dXhu9g==} - fs-minipass@2.1.0: resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} engines: {node: '>= 8'} @@ -2105,10 +2006,6 @@ packages: resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==} engines: {node: '>= 0.4'} - get-nonce@1.0.1: - resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} - engines: {node: '>=6'} - get-stream@8.0.1: resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} @@ -2236,9 +2133,6 @@ packages: resolution: {integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==} engines: {node: '>= 0.4'} - invariant@2.2.4: - resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} - is-arguments@1.1.1: resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} engines: {node: '>= 0.4'} @@ -2442,6 +2336,10 @@ packages: keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + klona@2.0.6: + resolution: {integrity: sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA==} + engines: {node: '>= 8'} + language-subtag-registry@0.3.23: resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==} @@ -2464,12 +2362,12 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} engines: {node: '>=10'} + 
lodash.debounce@4.0.8: + resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - lodash.mergewith@4.6.2: - resolution: {integrity: sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==} - lodash@4.17.21: resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} @@ -2594,6 +2492,12 @@ packages: neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + next-themes@0.3.0: + resolution: {integrity: sha512-/QHIrsYpd6Kfk7xakK4svpDI5mmXP0gfvCoJdGpZQ2TOrQZmsW0QxjaiLn8wbIKjtm4BTSqLoix4lxYYOnLJ/w==} + peerDependencies: + react: ^16.8 || ^17 || ^18 + react-dom: ^16.8 || ^17 || ^18 + node-fetch-native@1.6.4: resolution: {integrity: sha512-IhOigYzAKHd244OC0JIMIUrjzctirCmPkaIfhDeGcEETWof5zKYUW7e7MYvChGWh/4CJeXEgsRyGzuF334rOOQ==} @@ -2730,6 +2634,9 @@ packages: perfect-debounce@1.0.0: resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} + perfect-freehand@1.2.2: + resolution: {integrity: sha512-eh31l019WICQ03pkF3FSzHxB8n07ItqIQ++G5UV8JX0zVOXzgTGCqnRR0jJ2h9U8/2uW4W4mtGJELt9kEV0CFQ==} + picocolors@1.1.0: resolution: {integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==} @@ -2776,9 +2683,15 @@ packages: prop-types@15.8.1: resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + proxy-compare@3.0.0: + resolution: {integrity: sha512-y44MCkgtZUCT9tZGuE278fB7PWVf7fRYy0vbRXAts2o5F0EfC4fIQrvQQGBJo1WJbFcVLXzApOscyJuZqHQc1w==} + proxy-from-env@1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + proxy-memoize@3.0.1: + resolution: {integrity: sha512-VDdG/VYtOgdGkWJx7y0o7p+zArSf2383Isci8C+BP3YXgMYDoPd3cCBjw0JdWb6YBb9sFiOPbAADDVTPJnh+9g==} + punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -2789,28 +2702,11 @@ packages: rc9@2.1.2: resolution: {integrity: sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==} - react-clientside-effect@1.2.6: - resolution: {integrity: sha512-XGGGRQAKY+q25Lz9a/4EPqom7WRjz3z9R2k4jhVKA/puQFH/5Nt27vFZYql4m4NVNdUvX8PS3O7r/Zzm7cjUlg==} - peerDependencies: - react: ^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 - react-dom@18.3.1: resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} peerDependencies: react: ^18.3.1 - react-fast-compare@3.2.2: - resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==} - - react-focus-lock@2.12.1: - resolution: {integrity: sha512-lfp8Dve4yJagkHiFrC1bGtib3mF2ktqwPJw4/WGcgPW+pJ/AVQA5X2vI7xgp13FcxFEpYBBHpXai/N2DBNC0Jw==} - peerDependencies: - '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - react-icons@5.3.0: resolution: {integrity: sha512-DnUk8aFbTyQPSkCfF8dbX6kQjXA9DktMeJqfjrg6cK9vwQVMxmcA3BfP4QoiztVmEHtwlTgLFsPuH2NskKT6eg==} 
peerDependencies: @@ -2822,26 +2718,6 @@ packages: react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} - react-remove-scroll-bar@2.3.6: - resolution: {integrity: sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==} - engines: {node: '>=10'} - peerDependencies: - '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - - react-remove-scroll@2.5.10: - resolution: {integrity: sha512-m3zvBRANPBw3qxVVjEIPEQinkcwlFZ4qyomuWVpNJdv4c6MvHfXV0C3L9Jx5rr3HeBHKNRX+1jreB5QloDIJjA==} - engines: {node: '>=10'} - peerDependencies: - '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - react-router-dom@6.26.2: resolution: {integrity: sha512-z7YkaEW0Dy35T3/QKPYB1LjMK2R1fxnHO8kWpUMTBdfVzZrWOiY9a7CtN8HqdWtDUWd5FY6Dl8HFsqVwH4uOtQ==} engines: {node: '>=14.0.0'} @@ -2855,22 +2731,12 @@ packages: peerDependencies: react: '>=16.8' - react-select@5.8.0: - resolution: {integrity: sha512-TfjLDo58XrhP6VG5M/Mi56Us0Yt8X7xD6cDybC7yoRMUNm7BGO7qk8J0TLQOua/prb8vUOtsfnXZwfm30HGsAA==} + react-select@5.8.2: + resolution: {integrity: sha512-a/LkOckoI62710gGPQSQqUp7A10fGbH/ya3/IR49qaq3XoBvwymgD5mJgtiHxBDsutyEQfdKNycWVh8Cg8UCjw==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-style-singleton@2.2.1: - resolution: {integrity: sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==} - engines: {node: '>=10'} - peerDependencies: - '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - react-transition-group@4.4.5: resolution: {integrity: sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==} peerDependencies: @@ -3097,6 +2963,9 @@ packages: resolution: {integrity: sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==} engines: {node: ^14.18.0 || >=16.0.0} + tabbable@6.2.0: + resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==} + tar@6.2.1: resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} @@ -3108,9 +2977,6 @@ packages: text-table@0.2.0: resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} - tiny-invariant@1.3.3: - resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} - tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} @@ -3137,9 +3003,6 @@ packages: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} - toggle-selection@1.0.6: - resolution: {integrity: sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==} - ts-api-utils@1.3.0: resolution: {integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==} engines: {node: '>=16'} @@ -3149,9 +3012,6 @@ packages: ts-morph@23.0.0: resolution: {integrity: 
sha512-FcvFx7a9E8TUe6T3ShihXJLiJOiqyafzFKUO4aqIHDUCIvADdGNShcbc2W5PMr3LerXRv7mafvFZ9lRENxJmug==} - tslib@2.4.0: - resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==} - tslib@2.6.3: resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} @@ -3217,19 +3077,12 @@ packages: peerDependencies: browserslist: '>= 4.21.0' + uqr@0.1.2: + resolution: {integrity: sha512-MJu7ypHq6QasgF5YRTjqscSzQp/W11zoUk6kvmlH+fmWEs63Y0Eib13hYFwAzagRJcVY8WVnlV+eBDUGMJ5IbA==} + uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - use-callback-ref@1.3.2: - resolution: {integrity: sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==} - engines: {node: '>=10'} - peerDependencies: - '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - use-debounce@10.0.3: resolution: {integrity: sha512-DxQSI9ZKso689WM1mjgGU3ozcxU1TJElBJ3X6S4SMzMNcm2lVH0AHmyXB+K7ewjz2BSUKJTDqTcwtSMRfB89dg==} engines: {node: '>= 16.0.0'} @@ -3245,15 +3098,11 @@ packages: '@types/react': optional: true - use-sidecar@1.1.2: - resolution: {integrity: sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==} - engines: {node: '>=10'} + usehooks-ts@3.1.0: + resolution: {integrity: sha512-bBIa7yUyPhE1BCc0GmR96VU/15l/9gP1Ch5mYdLcFBaFGQsdmXkvjV0TtOqW1yUd6VjIwDunm+flSciCQXujiw==} + engines: {node: '>=16.15.0'} peerDependencies: - '@types/react': ^16.9.0 || ^17.0.0 || ^18.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - '@types/react': - optional: true + react: ^16.8.0 || ^17 || ^18 validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} @@ -3403,6 +3252,103 @@ snapshots: '@types/json-schema': 7.0.15 js-yaml: 4.1.0 + '@ark-ui/anatomy@3.5.0(@internationalized/date@3.5.6)': + dependencies: + '@zag-js/accordion': 0.62.1 + '@zag-js/anatomy': 0.62.1 + '@zag-js/avatar': 0.62.1 + '@zag-js/carousel': 0.62.1 + '@zag-js/checkbox': 0.62.1 + '@zag-js/clipboard': 0.62.1 + '@zag-js/collapsible': 0.62.1 + '@zag-js/color-picker': 0.62.1 + '@zag-js/color-utils': 0.62.1 + '@zag-js/combobox': 0.62.1 + '@zag-js/date-picker': 0.62.1 + '@zag-js/date-utils': 0.62.1(@internationalized/date@3.5.6) + '@zag-js/dialog': 0.62.1 + '@zag-js/editable': 0.62.1 + '@zag-js/file-upload': 0.62.1 + '@zag-js/hover-card': 0.62.1 + '@zag-js/menu': 0.62.1 + '@zag-js/number-input': 0.62.1 + '@zag-js/pagination': 0.62.1 + '@zag-js/pin-input': 0.62.1 + '@zag-js/popover': 0.62.1 + '@zag-js/presence': 0.62.1 + '@zag-js/progress': 0.62.1 + '@zag-js/qr-code': 0.62.1 + '@zag-js/radio-group': 0.62.1 + '@zag-js/rating-group': 0.62.1 + '@zag-js/select': 0.62.1 + '@zag-js/signature-pad': 0.62.1 + '@zag-js/slider': 0.62.1 + '@zag-js/splitter': 0.62.1 + '@zag-js/switch': 0.62.1 + '@zag-js/tabs': 0.62.1 + '@zag-js/tags-input': 0.62.1 + '@zag-js/time-picker': 0.62.1 + '@zag-js/toast': 0.62.1 + '@zag-js/toggle-group': 0.62.1 + '@zag-js/tooltip': 0.62.1 + '@zag-js/tree-view': 0.62.1 + transitivePeerDependencies: + - '@internationalized/date' + + '@ark-ui/react@4.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@internationalized/date': 3.5.6 + '@zag-js/accordion': 0.74.2 + '@zag-js/anatomy': 0.74.2 + 
'@zag-js/avatar': 0.74.2 + '@zag-js/carousel': 0.74.2 + '@zag-js/checkbox': 0.74.2 + '@zag-js/clipboard': 0.74.2 + '@zag-js/collapsible': 0.74.2 + '@zag-js/collection': 0.74.2 + '@zag-js/color-picker': 0.74.2 + '@zag-js/color-utils': 0.74.2 + '@zag-js/combobox': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/date-picker': 0.74.2(@internationalized/date@3.5.6) + '@zag-js/date-utils': 0.74.2(@internationalized/date@3.5.6) + '@zag-js/dialog': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/editable': 0.74.2 + '@zag-js/file-upload': 0.74.2 + '@zag-js/file-utils': 0.74.2 + '@zag-js/highlight-word': 0.74.2 + '@zag-js/hover-card': 0.74.2 + '@zag-js/i18n-utils': 0.74.2 + '@zag-js/menu': 0.74.2 + '@zag-js/number-input': 0.74.2 + '@zag-js/pagination': 0.74.2 + '@zag-js/pin-input': 0.74.2 + '@zag-js/popover': 0.74.2 + '@zag-js/presence': 0.74.2 + '@zag-js/progress': 0.74.2 + '@zag-js/qr-code': 0.74.2 + '@zag-js/radio-group': 0.74.2 + '@zag-js/rating-group': 0.74.2 + '@zag-js/react': 0.74.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@zag-js/select': 0.74.2 + '@zag-js/signature-pad': 0.74.2 + '@zag-js/slider': 0.74.2 + '@zag-js/splitter': 0.74.2 + '@zag-js/steps': 0.74.2 + '@zag-js/switch': 0.74.2 + '@zag-js/tabs': 0.74.2 + '@zag-js/tags-input': 0.74.2 + '@zag-js/time-picker': 0.74.2(@internationalized/date@3.5.6) + '@zag-js/timer': 0.74.2 + '@zag-js/toast': 0.74.2 + '@zag-js/toggle-group': 0.74.2 + '@zag-js/tooltip': 0.74.2 + '@zag-js/tree-view': 0.74.2 + '@zag-js/types': 0.74.2 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + '@babel/code-frame@7.24.7': dependencies: '@babel/highlight': 7.24.7 @@ -3437,7 +3383,7 @@ snapshots: '@babel/helper-module-imports@7.24.7': dependencies: '@babel/traverse': 7.25.4 - '@babel/types': 7.25.4 + '@babel/types': 7.25.6 transitivePeerDependencies: - supports-color @@ -3497,10 +3443,10 @@ snapshots: dependencies: '@babel/code-frame': 7.24.7 '@babel/generator': 7.25.4 - '@babel/parser': 7.25.4 + '@babel/parser': 7.25.6 '@babel/template': 7.25.0 - '@babel/types': 7.25.4 - debug: 4.3.6 + '@babel/types': 7.25.6 + debug: 4.3.7 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -3512,723 +3458,40 @@ snapshots: '@babel/types@7.25.4': dependencies: - '@babel/helper-string-parser': 7.24.8 - '@babel/helper-validator-identifier': 7.24.7 - to-fast-properties: 2.0.0 - - '@babel/types@7.25.6': - dependencies: - '@babel/helper-string-parser': 7.24.8 - '@babel/helper-validator-identifier': 7.24.7 - to-fast-properties: 2.0.0 - - '@bcoe/v8-coverage@0.2.3': {} - - '@chakra-ui/accordion@2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/descendant': 3.1.0(react@18.3.1) - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 
2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/transition': 2.1.0(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/alert@2.2.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/spinner': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/anatomy@2.2.2': {} - - '@chakra-ui/avatar@2.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/image': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/breadcrumb@2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/breakpoint-utils@2.0.8': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - - '@chakra-ui/button@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - 
dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/spinner': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/card@2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/checkbox@2.3.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/visually-hidden': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@zag-js/focus-visible': 0.16.0 - react: 18.3.1 - - '@chakra-ui/clickable@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - react: 18.3.1 - - '@chakra-ui/close-button@2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/system': 
2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/color-mode@2.2.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/control-box@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/counter@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/number-utils': 2.0.7 - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - react: 18.3.1 - - '@chakra-ui/css-reset@2.3.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(react@18.3.1)': - dependencies: - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/descendant@3.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/dom-utils@2.1.0': {} - - '@chakra-ui/editable@3.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-focus-on-pointer-down': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/event-utils@2.0.8': {} - - '@chakra-ui/focus-lock@2.1.0(@types/react@18.3.5)(react@18.3.1)': - dependencies: - '@chakra-ui/dom-utils': 2.1.0 - react: 18.3.1 - react-focus-lock: 2.12.1(@types/react@18.3.5)(react@18.3.1) - transitivePeerDependencies: - - '@types/react' - - '@chakra-ui/form-control@2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 
- '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/hooks@2.2.1(react@18.3.1)': - dependencies: - '@chakra-ui/react-utils': 2.0.12(react@18.3.1) - '@chakra-ui/utils': 2.0.15 - compute-scroll-into-view: 3.0.3 - copy-to-clipboard: 3.3.3 - react: 18.3.1 - - '@chakra-ui/icon@3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/image@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/input@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/object-utils': 2.1.0 - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/layout@2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/breakpoint-utils': 2.0.8 - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/object-utils': 2.1.0 - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 
18.3.1 - - '@chakra-ui/lazy-utils@2.0.5': {} - - '@chakra-ui/live-region@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/media-query@3.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/breakpoint-utils': 2.0.8 - '@chakra-ui/react-env': 3.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/menu@2.2.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/clickable': 2.1.0(react@18.3.1) - '@chakra-ui/descendant': 3.1.0(react@18.3.1) - '@chakra-ui/lazy-utils': 2.0.5 - '@chakra-ui/popper': 3.1.0(react@18.3.1) - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-animation-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-disclosure': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-focus-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-outside-click': 2.2.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/transition': 2.1.0(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/modal@2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(@types/react@18.3.5)(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/close-button': 2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/focus-lock': 2.1.0(@types/react@18.3.5)(react@18.3.1) - '@chakra-ui/portal': 2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 
2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/transition': 2.1.0(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - aria-hidden: 1.2.4 - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.5.10(@types/react@18.3.5)(react@18.3.1) - transitivePeerDependencies: - - '@types/react' - - '@chakra-ui/number-input@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/counter': 2.1.0(react@18.3.1) - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-event-listener': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-interval': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/number-utils@2.0.7': {} - - '@chakra-ui/object-utils@2.1.0': {} - - '@chakra-ui/pin-input@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/descendant': 3.1.0(react@18.3.1) - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/popover@2.2.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/close-button': 
2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/lazy-utils': 2.0.5 - '@chakra-ui/popper': 3.1.0(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-animation-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-disclosure': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-focus-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-focus-on-pointer-down': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/popper@3.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@popperjs/core': 2.11.8 - react: 18.3.1 - - '@chakra-ui/portal@2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - '@chakra-ui/progress@2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/provider@2.4.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/css-reset': 2.3.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/portal': 2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-env': 3.1.0(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/utils': 2.0.15 - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@emotion/styled': 11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - '@chakra-ui/radio@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/form-control': 
2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@zag-js/focus-visible': 0.16.0 - react: 18.3.1 - - '@chakra-ui/react-children-utils@2.0.6(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-context@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-env@3.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-types@2.0.7(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-animation-state@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/dom-utils': 2.1.0 - '@chakra-ui/react-use-event-listener': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-callback-ref@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-controllable-state@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-disclosure@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-event-listener@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-focus-effect@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/dom-utils': 2.1.0 - '@chakra-ui/react-use-event-listener': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-focus-on-pointer-down@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-event-listener': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-interval@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-latest-ref@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-merge-refs@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-outside-click@2.2.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-pan-event@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/event-utils': 2.0.8 - '@chakra-ui/react-use-latest-ref': 2.1.0(react@18.3.1) - framesync: 6.1.2 - react: 18.3.1 - - '@chakra-ui/react-use-previous@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-safe-layout-effect@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - '@chakra-ui/react-use-size@2.1.0(react@18.3.1)': - dependencies: - '@zag-js/element-size': 0.10.5 - react: 18.3.1 - - '@chakra-ui/react-use-timeout@2.1.0(react@18.3.1)': - dependencies: - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/react-use-update-effect@2.1.0(react@18.3.1)': - dependencies: - react: 18.3.1 - - 
'@chakra-ui/react-utils@2.0.12(react@18.3.1)': - dependencies: - '@chakra-ui/utils': 2.0.15 - react: 18.3.1 - - '@chakra-ui/react@2.8.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/accordion': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/alert': 2.2.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/avatar': 2.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/breadcrumb': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/button': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/card': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/checkbox': 2.3.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/close-button': 2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/control-box': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/counter': 2.1.0(react@18.3.1) - '@chakra-ui/css-reset': 2.3.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/editable': 3.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/focus-lock': 2.1.0(@types/react@18.3.5)(react@18.3.1) - '@chakra-ui/form-control': 
2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/hooks': 2.2.1(react@18.3.1) - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/image': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/input': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/layout': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/live-region': 2.1.0(react@18.3.1) - '@chakra-ui/media-query': 3.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/menu': 2.2.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/modal': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(@types/react@18.3.5)(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/number-input': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/pin-input': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/popover': 2.2.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/popper': 3.1.0(react@18.3.1) - '@chakra-ui/portal': 2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/progress': 
2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/provider': 2.4.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/radio': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-env': 3.1.0(react@18.3.1) - '@chakra-ui/select': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/skeleton': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/skip-nav': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/slider': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/spinner': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/stat': 2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/stepper': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/styled-system': 2.9.2 - '@chakra-ui/switch': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/table': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/tabs': 
3.0.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/tag': 3.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/textarea': 2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/theme': 3.3.1(@chakra-ui/styled-system@2.9.2) - '@chakra-ui/theme-utils': 2.0.21 - '@chakra-ui/toast': 7.0.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/tooltip': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/transition': 2.1.0(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/utils': 2.0.15 - '@chakra-ui/visually-hidden': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@emotion/styled': 11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - transitivePeerDependencies: - - '@types/react' - - '@chakra-ui/select@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/shared-utils@2.0.5': {} - - 
'@chakra-ui/skeleton@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/media-query': 3.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-use-previous': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/skip-nav@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/slider@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/number-utils': 2.0.7 - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-callback-ref': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-latest-ref': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-pan-event': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-size': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/spinner@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/stat@2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 
2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/stepper@2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/styled-system@2.9.2': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - csstype: 3.1.3 - lodash.mergewith: 4.6.2 - - '@chakra-ui/switch@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/checkbox': 2.3.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/color-mode': 2.2.0(react@18.3.1) - '@chakra-ui/object-utils': 2.1.0 - '@chakra-ui/react-utils': 2.0.12(react@18.3.1) - '@chakra-ui/styled-system': 2.9.2 - '@chakra-ui/theme-utils': 2.0.21 - '@chakra-ui/utils': 2.0.15 - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@emotion/styled': 11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1) - react: 18.3.1 - react-fast-compare: 3.2.2 - - '@chakra-ui/table@2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 
2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/tabs@3.0.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/clickable': 2.1.0(react@18.3.1) - '@chakra-ui/descendant': 3.1.0(react@18.3.1) - '@chakra-ui/lazy-utils': 2.0.5 - '@chakra-ui/react-children-utils': 2.0.6(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-controllable-state': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-safe-layout-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/tag@3.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/textarea@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - react: 18.3.1 - - '@chakra-ui/theme-tools@2.1.2(@chakra-ui/styled-system@2.9.2)': - dependencies: - '@chakra-ui/anatomy': 2.2.2 - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/styled-system': 2.9.2 - color2k: 2.0.3 - - '@chakra-ui/theme-utils@2.0.21': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/styled-system': 2.9.2 - '@chakra-ui/theme': 3.3.1(@chakra-ui/styled-system@2.9.2) - lodash.mergewith: 4.6.2 - - '@chakra-ui/theme@3.3.1(@chakra-ui/styled-system@2.9.2)': - dependencies: - '@chakra-ui/anatomy': 2.2.2 - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/styled-system': 2.9.2 - '@chakra-ui/theme-tools': 2.1.2(@chakra-ui/styled-system@2.9.2) - - 
'@chakra-ui/toast@7.0.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/alert': 2.2.2(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/close-button': 2.1.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/portal': 2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-context': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-timeout': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-update-effect': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/styled-system': 2.9.2 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@chakra-ui/theme': 3.3.1(@chakra-ui/styled-system@2.9.2) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - '@chakra-ui/tooltip@2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/dom-utils': 2.1.0 - '@chakra-ui/popper': 3.1.0(react@18.3.1) - '@chakra-ui/portal': 2.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@chakra-ui/react-types': 2.0.7(react@18.3.1) - '@chakra-ui/react-use-disclosure': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-event-listener': 2.1.0(react@18.3.1) - '@chakra-ui/react-use-merge-refs': 2.1.0(react@18.3.1) - '@chakra-ui/shared-utils': 2.0.5 - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - '@chakra-ui/transition@2.1.0(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': - dependencies: - '@chakra-ui/shared-utils': 2.0.5 - framer-motion: 11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 + '@babel/helper-string-parser': 7.24.8 + '@babel/helper-validator-identifier': 7.24.7 + to-fast-properties: 2.0.0 - '@chakra-ui/utils@2.0.15': + '@babel/types@7.25.6': dependencies: - '@types/lodash.mergewith': 4.6.7 - css-box-model: 1.2.1 - framesync: 6.1.2 - lodash.mergewith: 4.6.2 + '@babel/helper-string-parser': 7.24.8 + '@babel/helper-validator-identifier': 7.24.7 + 
to-fast-properties: 2.0.0 - '@chakra-ui/visually-hidden@2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1)': + '@bcoe/v8-coverage@0.2.3': {} + + '@chakra-ui/anatomy@2.2.2': {} + + '@chakra-ui/react@3.0.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@internationalized/date@3.5.6)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) + '@ark-ui/anatomy': 3.5.0(@internationalized/date@3.5.6) + '@ark-ui/react': 4.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@emotion/is-prop-valid': 1.3.1 + '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) + '@emotion/serialize': 1.3.2 + '@emotion/use-insertion-effect-with-fallbacks': 1.1.0(react@18.3.1) + '@emotion/utils': 1.4.1 + '@pandacss/is-valid-prop': 0.41.0 + csstype: 3.1.3 react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + transitivePeerDependencies: + - '@internationalized/date' '@emotion/babel-plugin@11.12.0': dependencies: '@babel/helper-module-imports': 7.24.7 - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 '@emotion/hash': 0.9.2 '@emotion/memoize': 0.9.0 '@emotion/serialize': 1.3.1 @@ -4251,7 +3514,7 @@ snapshots: '@emotion/hash@0.9.2': {} - '@emotion/is-prop-valid@1.3.0': + '@emotion/is-prop-valid@1.3.1': dependencies: '@emotion/memoize': 0.9.0 @@ -4259,7 +3522,7 @@ snapshots: '@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1)': dependencies: - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 '@emotion/babel-plugin': 11.12.0 '@emotion/cache': 11.13.1 '@emotion/serialize': 1.3.1 @@ -4281,22 +3544,15 @@ snapshots: '@emotion/utils': 1.4.0 csstype: 3.1.3 - '@emotion/sheet@1.4.0': {} - - '@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1)': + '@emotion/serialize@1.3.2': dependencies: - '@babel/runtime': 7.25.4 - '@emotion/babel-plugin': 11.12.0 - '@emotion/is-prop-valid': 1.3.0 - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@emotion/serialize': 1.3.1 - '@emotion/use-insertion-effect-with-fallbacks': 1.1.0(react@18.3.1) - '@emotion/utils': 1.4.0 - react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.5 - transitivePeerDependencies: - - supports-color + '@emotion/hash': 0.9.2 + '@emotion/memoize': 0.9.0 + '@emotion/unitless': 0.10.0 + '@emotion/utils': 1.4.1 + csstype: 3.1.3 + + '@emotion/sheet@1.4.0': {} '@emotion/unitless@0.10.0': {} @@ -4306,6 +3562,8 @@ snapshots: '@emotion/utils@1.4.0': {} + '@emotion/utils@1.4.1': {} + '@emotion/weak-memoize@0.4.0': {} '@esbuild/aix-ppc64@0.21.5': @@ -4420,13 +3678,20 @@ snapshots: dependencies: '@floating-ui/utils': 0.2.7 - '@floating-ui/dom@1.6.10': + '@floating-ui/dom@1.6.11': + dependencies: + '@floating-ui/core': 1.6.7 + '@floating-ui/utils': 0.2.8 + + '@floating-ui/dom@1.6.8': dependencies: '@floating-ui/core': 1.6.7 '@floating-ui/utils': 0.2.7 '@floating-ui/utils@0.2.7': {} + '@floating-ui/utils@0.2.8': {} + '@hey-api/openapi-ts@0.52.0(magicast@0.3.5)(typescript@5.5.4)': dependencies: '@apidevtools/json-schema-ref-parser': 11.6.4 @@ -4442,6 +3707,18 @@ snapshots: '@humanwhocodes/retry@0.3.0': {} + '@internationalized/date@3.5.5': + dependencies: + 
'@swc/helpers': 0.5.13 + + '@internationalized/date@3.5.6': + dependencies: + '@swc/helpers': 0.5.13 + + '@internationalized/number@3.5.3': + dependencies: + '@swc/helpers': 0.5.13 + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -4484,13 +3761,13 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.17.1 + '@pandacss/is-valid-prop@0.41.0': {} + '@pkgjs/parseargs@0.11.0': optional: true '@pkgr/core@0.1.1': {} - '@popperjs/core@2.11.8': {} - '@remix-run/router@1.19.2': {} '@rollup/rollup-android-arm-eabi@4.24.0': @@ -4583,7 +3860,7 @@ snapshots: '@swc/core-win32-x64-msvc@1.7.14': optional: true - '@swc/core@1.7.14': + '@swc/core@1.7.14(@swc/helpers@0.5.13)': dependencies: '@swc/counter': 0.1.3 '@swc/types': 0.1.12 @@ -4598,9 +3875,14 @@ snapshots: '@swc/core-win32-arm64-msvc': 1.7.14 '@swc/core-win32-ia32-msvc': 1.7.14 '@swc/core-win32-x64-msvc': 1.7.14 + '@swc/helpers': 0.5.13 '@swc/counter@0.1.3': {} + '@swc/helpers@0.5.13': + dependencies: + tslib: 2.6.3 + '@swc/types@0.1.12': dependencies: '@swc/counter': 0.1.3 @@ -4685,12 +3967,6 @@ snapshots: '@types/json-schema@7.0.15': {} - '@types/lodash.mergewith@4.6.7': - dependencies: - '@types/lodash': 4.17.7 - - '@types/lodash@4.17.7': {} - '@types/node@22.5.4': dependencies: undici-types: 6.19.8 @@ -4833,9 +4109,9 @@ snapshots: '@typescript-eslint/types': 8.5.0 eslint-visitor-keys: 3.4.3 - '@vitejs/plugin-react-swc@3.7.0(vite@5.4.6(@types/node@22.5.4))': + '@vitejs/plugin-react-swc@3.7.0(@swc/helpers@0.5.13)(vite@5.4.6(@types/node@22.5.4))': dependencies: - '@swc/core': 1.7.14 + '@swc/core': 1.7.14(@swc/helpers@0.5.13) vite: 5.4.6(@types/node@22.5.4) transitivePeerDependencies: - '@swc/helpers' @@ -4898,13 +4174,959 @@ snapshots: loupe: 3.1.1 tinyrainbow: 1.2.0 - '@zag-js/dom-query@0.16.0': {} + '@zag-js/accordion@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/accordion@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/anatomy@0.62.1': {} + + '@zag-js/anatomy@0.74.2': {} + + '@zag-js/aria-hidden@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + + '@zag-js/aria-hidden@0.74.2': + dependencies: + aria-hidden: 1.2.4 + + '@zag-js/auto-resize@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + + '@zag-js/auto-resize@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + + '@zag-js/avatar@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/avatar@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/carousel@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/carousel@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/checkbox@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + 
'@zag-js/checkbox@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/focus-visible': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/clipboard@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/clipboard@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/collapsible@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/collapsible@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/collection@0.62.1': + dependencies: + '@zag-js/utils': 0.62.1 + + '@zag-js/collection@0.74.2': + dependencies: + '@zag-js/utils': 0.74.2 + + '@zag-js/color-picker@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/color-utils': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/text-selection': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/color-picker@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/color-utils': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/text-selection': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/color-utils@0.62.1': + dependencies: + '@zag-js/numeric-range': 0.62.1 + + '@zag-js/color-utils@0.74.2': + dependencies: + '@zag-js/numeric-range': 0.74.2 + + '@zag-js/combobox@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/aria-hidden': 0.62.1 + '@zag-js/collection': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/combobox@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/aria-hidden': 0.74.2 + '@zag-js/collection': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/core@0.62.1': + dependencies: + '@zag-js/store': 0.62.1 + klona: 2.0.6 + + '@zag-js/core@0.74.2': + dependencies: + '@zag-js/store': 0.74.2 + '@zag-js/utils': 0.74.2 + klona: 2.0.6 + + '@zag-js/date-picker@0.62.1': + dependencies: + '@internationalized/date': 3.5.5 + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/date-utils': 0.62.1(@internationalized/date@3.5.5) + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/live-region': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/text-selection': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/date-picker@0.74.2(@internationalized/date@3.5.6)': + dependencies: + '@internationalized/date': 3.5.6 + '@zag-js/anatomy': 0.74.2 + 
'@zag-js/core': 0.74.2 + '@zag-js/date-utils': 0.74.2(@internationalized/date@3.5.6) + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/live-region': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/text-selection': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/date-utils@0.62.1(@internationalized/date@3.5.5)': + dependencies: + '@internationalized/date': 3.5.5 + + '@zag-js/date-utils@0.62.1(@internationalized/date@3.5.6)': + dependencies: + '@internationalized/date': 3.5.6 + + '@zag-js/date-utils@0.74.2(@internationalized/date@3.5.6)': + dependencies: + '@internationalized/date': 3.5.6 + + '@zag-js/dialog@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/aria-hidden': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/remove-scroll': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + focus-trap: 7.5.4 + + '@zag-js/dialog@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/aria-hidden': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/remove-scroll': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + focus-trap: 7.6.0 + + '@zag-js/dismissable@0.62.1': + dependencies: + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/interact-outside': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/dismissable@0.74.2': + dependencies: + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/interact-outside': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/dom-event@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + '@zag-js/text-selection': 0.62.1 + '@zag-js/types': 0.62.1 + + '@zag-js/dom-event@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + '@zag-js/text-selection': 0.74.2 + '@zag-js/types': 0.74.2 + + '@zag-js/dom-query@0.62.1': {} + + '@zag-js/dom-query@0.74.2': {} + + '@zag-js/editable@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/interact-outside': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/editable@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/interact-outside': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/element-rect@0.62.1': {} + + '@zag-js/element-rect@0.74.2': {} + + '@zag-js/element-size@0.62.1': {} + + '@zag-js/element-size@0.74.2': {} + + '@zag-js/file-upload@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/file-utils': 0.62.1 + '@zag-js/i18n-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/file-upload@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/file-utils': 0.74.2 + '@zag-js/i18n-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/file-utils@0.62.1': + dependencies: + '@zag-js/i18n-utils': 0.62.1 + + '@zag-js/file-utils@0.74.2': + dependencies: + '@zag-js/i18n-utils': 0.74.2 + + '@zag-js/focus-visible@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + + '@zag-js/form-utils@0.62.1': {} - '@zag-js/element-size@0.10.5': {} + '@zag-js/form-utils@0.74.2': {} - 
'@zag-js/focus-visible@0.16.0': + '@zag-js/highlight-word@0.74.2': {} + + '@zag-js/hover-card@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/hover-card@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/i18n-utils@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + + '@zag-js/i18n-utils@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + + '@zag-js/interact-outside@0.62.1': + dependencies: + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/interact-outside@0.74.2': + dependencies: + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/live-region@0.62.1': {} + + '@zag-js/live-region@0.74.2': {} + + '@zag-js/menu@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/rect-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/menu@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/rect-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/number-input@0.62.1': + dependencies: + '@internationalized/number': 3.5.3 + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/number-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/number-input@0.74.2': + dependencies: + '@internationalized/number': 3.5.3 + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/number-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/number-utils@0.62.1': {} + + '@zag-js/number-utils@0.74.2': {} + + '@zag-js/numeric-range@0.62.1': {} + + '@zag-js/numeric-range@0.74.2': {} + + '@zag-js/pagination@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/pagination@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/pin-input@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/pin-input@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/popover@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/aria-hidden': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + 
'@zag-js/remove-scroll': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + focus-trap: 7.5.4 + + '@zag-js/popover@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/aria-hidden': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/remove-scroll': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + focus-trap: 7.6.0 + + '@zag-js/popper@0.62.1': + dependencies: + '@floating-ui/dom': 1.6.8 + '@zag-js/dom-query': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/popper@0.74.2': + dependencies: + '@floating-ui/dom': 1.6.11 + '@zag-js/dom-query': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/presence@0.62.1': + dependencies: + '@zag-js/core': 0.62.1 + '@zag-js/types': 0.62.1 + + '@zag-js/presence@0.74.2': + dependencies: + '@zag-js/core': 0.74.2 + '@zag-js/types': 0.74.2 + + '@zag-js/progress@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/progress@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/qr-code@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + proxy-memoize: 3.0.1 + uqr: 0.1.2 + + '@zag-js/qr-code@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + proxy-memoize: 3.0.1 + uqr: 0.1.2 + + '@zag-js/radio-group@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/element-rect': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/radio-group@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/element-rect': 0.74.2 + '@zag-js/focus-visible': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/rating-group@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/rating-group@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/react@0.74.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@zag-js/core': 0.74.2 + '@zag-js/store': 0.74.2 + '@zag-js/types': 0.74.2 + proxy-compare: 3.0.0 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + + '@zag-js/rect-utils@0.62.1': {} + + '@zag-js/rect-utils@0.74.2': {} + + '@zag-js/remove-scroll@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + + '@zag-js/remove-scroll@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + + '@zag-js/select@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/collection': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/select@0.74.2': + 
dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/collection': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/signature-pad@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + perfect-freehand: 1.2.2 + + '@zag-js/signature-pad@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + perfect-freehand: 1.2.2 + + '@zag-js/slider@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/element-size': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/numeric-range': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/slider@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/element-size': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/numeric-range': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/splitter@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/number-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/splitter@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/number-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/steps@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/store@0.62.1': + dependencies: + proxy-compare: 3.0.0 + + '@zag-js/store@0.74.2': + dependencies: + proxy-compare: 3.0.0 + + '@zag-js/switch@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/switch@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/focus-visible': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/tabs@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/element-rect': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/tabs@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/element-rect': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/tags-input@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/auto-resize': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/form-utils': 0.62.1 + '@zag-js/interact-outside': 0.62.1 + '@zag-js/live-region': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + 
'@zag-js/tags-input@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/auto-resize': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/form-utils': 0.74.2 + '@zag-js/interact-outside': 0.74.2 + '@zag-js/live-region': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/text-selection@0.62.1': + dependencies: + '@zag-js/dom-query': 0.62.1 + + '@zag-js/text-selection@0.74.2': + dependencies: + '@zag-js/dom-query': 0.74.2 + + '@zag-js/time-picker@0.62.1': + dependencies: + '@internationalized/date': 3.5.5 + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/time-picker@0.74.2(@internationalized/date@3.5.6)': + dependencies: + '@internationalized/date': 3.5.6 + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/timer@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/toast@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dismissable': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/toast@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dismissable': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/toggle-group@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/toggle-group@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/tooltip@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/popper': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/tooltip@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/focus-visible': 0.74.2 + '@zag-js/popper': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/tree-view@0.62.1': + dependencies: + '@zag-js/anatomy': 0.62.1 + '@zag-js/core': 0.62.1 + '@zag-js/dom-event': 0.62.1 + '@zag-js/dom-query': 0.62.1 + '@zag-js/types': 0.62.1 + '@zag-js/utils': 0.62.1 + + '@zag-js/tree-view@0.74.2': + dependencies: + '@zag-js/anatomy': 0.74.2 + '@zag-js/core': 0.74.2 + '@zag-js/dom-event': 0.74.2 + '@zag-js/dom-query': 0.74.2 + '@zag-js/types': 0.74.2 + '@zag-js/utils': 0.74.2 + + '@zag-js/types@0.62.1': dependencies: - '@zag-js/dom-query': 0.16.0 + csstype: 3.1.3 + + '@zag-js/types@0.74.2': + dependencies: + csstype: 3.1.3 + + '@zag-js/utils@0.62.1': {} + + '@zag-js/utils@0.74.2': {} acorn-jsx@5.3.2(acorn@8.12.1): dependencies: @@ -5036,7 +5258,7 @@ snapshots: babel-plugin-macros@3.1.0: dependencies: - '@babel/runtime': 7.25.4 + 
'@babel/runtime': 7.25.6 cosmiconfig: 7.1.0 resolve: 1.22.8 @@ -5107,19 +5329,13 @@ snapshots: loupe: 3.1.1 pathval: 2.0.0 - chakra-react-select@4.9.2(uzcvocchpeesoxvtkif6ppnvaq): + chakra-react-select@6.0.0-next.2(@chakra-ui/react@3.0.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@internationalized/date@3.5.6)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.5)(next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@chakra-ui/form-control': 2.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/icon': 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/layout': 2.3.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/media-query': 3.3.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/menu': 2.2.1(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/spinner': 2.1.0(@chakra-ui/system@2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1))(react@18.3.1) - '@chakra-ui/system': 2.6.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@emotion/styled@11.13.0(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@types/react@18.3.5)(react@18.3.1))(react@18.3.1) - '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) + '@chakra-ui/react': 3.0.2(@emotion/react@11.13.3(@types/react@18.3.5)(react@18.3.1))(@internationalized/date@3.5.6)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next-themes: 0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-select: 5.8.0(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-select: 5.8.2(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) transitivePeerDependencies: - '@types/react' - supports-color @@ -5180,16 +5396,12 @@ snapshots: color-name@1.1.4: {} - color2k@2.0.3: {} - combined-stream@1.0.8: dependencies: delayed-stream: 1.0.0 commander@12.1.0: {} - compute-scroll-into-view@3.0.3: {} - concat-map@0.0.1: {} confbox@0.1.7: {} @@ -5198,10 +5410,6 @@ snapshots: convert-source-map@1.9.0: {} - copy-to-clipboard@3.3.3: - dependencies: - toggle-selection: 1.0.6 - core-js-compat@3.38.1: dependencies: browserslist: 4.23.3 @@ -5220,10 +5428,6 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 - css-box-model@1.2.1: - dependencies: - tiny-invariant: 1.3.3 - 
css.escape@1.5.1: {} csstype@3.1.3: {} @@ -5303,8 +5507,6 @@ snapshots: destr@2.0.3: {} - detect-node-es@1.1.0: {} - dir-glob@3.0.1: dependencies: path-type: 4.0.0 @@ -5319,7 +5521,7 @@ snapshots: dom-helpers@5.2.1: dependencies: - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 csstype: 3.1.3 dotenv@16.4.5: {} @@ -5699,9 +5901,13 @@ snapshots: flatted@3.3.1: {} - focus-lock@1.3.5: + focus-trap@7.5.4: dependencies: - tslib: 2.6.3 + tabbable: 6.2.0 + + focus-trap@7.6.0: + dependencies: + tabbable: 6.2.0 follow-redirects@1.15.9: {} @@ -5720,18 +5926,6 @@ snapshots: combined-stream: 1.0.8 mime-types: 2.1.35 - framer-motion@11.3.29(@emotion/is-prop-valid@1.3.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): - dependencies: - tslib: 2.6.3 - optionalDependencies: - '@emotion/is-prop-valid': 1.3.0 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - framesync@6.1.2: - dependencies: - tslib: 2.4.0 - fs-minipass@2.1.0: dependencies: minipass: 3.3.6 @@ -5760,8 +5954,6 @@ snapshots: has-symbols: 1.0.3 hasown: 2.0.2 - get-nonce@1.0.1: {} - get-stream@8.0.1: {} get-symbol-description@1.0.2: @@ -5897,10 +6089,6 @@ snapshots: hasown: 2.0.2 side-channel: 1.0.6 - invariant@2.2.4: - dependencies: - loose-envify: 1.4.0 - is-arguments@1.1.1: dependencies: call-bind: 1.0.7 @@ -6092,6 +6280,8 @@ snapshots: dependencies: json-buffer: 3.0.1 + klona@2.0.6: {} + language-subtag-registry@0.3.23: {} language-tags@1.0.9: @@ -6113,9 +6303,9 @@ snapshots: dependencies: p-locate: 5.0.0 - lodash.merge@4.6.2: {} + lodash.debounce@4.0.8: {} - lodash.mergewith@4.6.2: {} + lodash.merge@4.6.2: {} lodash@4.17.21: {} @@ -6218,6 +6408,11 @@ snapshots: neo-async@2.6.2: {} + next-themes@0.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + node-fetch-native@1.6.4: {} node-releases@2.0.18: {} @@ -6355,6 +6550,8 @@ snapshots: perfect-debounce@1.0.0: {} + perfect-freehand@1.2.2: {} + picocolors@1.1.0: {} picomatch@2.3.1: {} @@ -6397,8 +6594,14 @@ snapshots: object-assign: 4.1.1 react-is: 16.13.1 + proxy-compare@3.0.0: {} + proxy-from-env@1.1.0: {} + proxy-memoize@3.0.1: + dependencies: + proxy-compare: 3.0.0 + punycode@2.3.1: {} queue-microtask@1.2.3: {} @@ -6408,31 +6611,12 @@ snapshots: defu: 6.1.4 destr: 2.0.3 - react-clientside-effect@1.2.6(react@18.3.1): - dependencies: - '@babel/runtime': 7.25.4 - react: 18.3.1 - react-dom@18.3.1(react@18.3.1): dependencies: loose-envify: 1.4.0 react: 18.3.1 scheduler: 0.23.2 - react-fast-compare@3.2.2: {} - - react-focus-lock@2.12.1(@types/react@18.3.5)(react@18.3.1): - dependencies: - '@babel/runtime': 7.25.4 - focus-lock: 1.3.5 - prop-types: 15.8.1 - react: 18.3.1 - react-clientside-effect: 1.2.6(react@18.3.1) - use-callback-ref: 1.3.2(@types/react@18.3.5)(react@18.3.1) - use-sidecar: 1.1.2(@types/react@18.3.5)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.5 - react-icons@5.3.0(react@18.3.1): dependencies: react: 18.3.1 @@ -6441,25 +6625,6 @@ snapshots: react-is@17.0.2: {} - react-remove-scroll-bar@2.3.6(@types/react@18.3.5)(react@18.3.1): - dependencies: - react: 18.3.1 - react-style-singleton: 2.2.1(@types/react@18.3.5)(react@18.3.1) - tslib: 2.6.3 - optionalDependencies: - '@types/react': 18.3.5 - - react-remove-scroll@2.5.10(@types/react@18.3.5)(react@18.3.1): - dependencies: - react: 18.3.1 - react-remove-scroll-bar: 2.3.6(@types/react@18.3.5)(react@18.3.1) - react-style-singleton: 2.2.1(@types/react@18.3.5)(react@18.3.1) - tslib: 2.6.3 - use-callback-ref: 1.3.2(@types/react@18.3.5)(react@18.3.1) - 
use-sidecar: 1.1.2(@types/react@18.3.5)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.5 - react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@remix-run/router': 1.19.2 @@ -6472,12 +6637,12 @@ snapshots: '@remix-run/router': 1.19.2 react: 18.3.1 - react-select@5.8.0(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-select@5.8.2(@types/react@18.3.5)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 '@emotion/cache': 11.13.1 '@emotion/react': 11.13.3(@types/react@18.3.5)(react@18.3.1) - '@floating-ui/dom': 1.6.10 + '@floating-ui/dom': 1.6.11 '@types/react-transition-group': 4.4.11 memoize-one: 6.0.0 prop-types: 15.8.1 @@ -6489,18 +6654,9 @@ snapshots: - '@types/react' - supports-color - react-style-singleton@2.2.1(@types/react@18.3.5)(react@18.3.1): - dependencies: - get-nonce: 1.0.1 - invariant: 2.2.4 - react: 18.3.1 - tslib: 2.6.3 - optionalDependencies: - '@types/react': 18.3.5 - react-transition-group@4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@babel/runtime': 7.25.4 + '@babel/runtime': 7.25.6 dom-helpers: 5.2.1 loose-envify: 1.4.0 prop-types: 15.8.1 @@ -6775,6 +6931,8 @@ snapshots: '@pkgr/core': 0.1.1 tslib: 2.6.3 + tabbable@6.2.0: {} + tar@6.2.1: dependencies: chownr: 2.0.0 @@ -6792,8 +6950,6 @@ snapshots: text-table@0.2.0: {} - tiny-invariant@1.3.3: {} - tinybench@2.9.0: {} tinyexec@0.3.0: {} @@ -6810,8 +6966,6 @@ snapshots: dependencies: is-number: 7.0.0 - toggle-selection@1.0.6: {} - ts-api-utils@1.3.0(typescript@5.5.4): dependencies: typescript: 5.5.4 @@ -6821,8 +6975,6 @@ snapshots: '@ts-morph/common': 0.24.0 code-block-writer: 13.0.2 - tslib@2.4.0: {} - tslib@2.6.3: {} type-check@0.4.0: @@ -6898,17 +7050,12 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.0 + uqr@0.1.2: {} + uri-js@4.4.1: dependencies: punycode: 2.3.1 - use-callback-ref@1.3.2(@types/react@18.3.5)(react@18.3.1): - dependencies: - react: 18.3.1 - tslib: 2.6.3 - optionalDependencies: - '@types/react': 18.3.5 - use-debounce@10.0.3(react@18.3.1): dependencies: react: 18.3.1 @@ -6919,13 +7066,10 @@ snapshots: optionalDependencies: '@types/react': 18.3.5 - use-sidecar@1.1.2(@types/react@18.3.5)(react@18.3.1): + usehooks-ts@3.1.0(react@18.3.1): dependencies: - detect-node-es: 1.1.0 + lodash.debounce: 4.0.8 react: 18.3.1 - tslib: 2.6.3 - optionalDependencies: - '@types/react': 18.3.5 validate-npm-package-license@3.0.4: dependencies: diff --git a/airflow/ui/rules/typescript.js b/airflow/ui/rules/typescript.js index 2856e89b5a7f1..9f33607a0e84c 100644 --- a/airflow/ui/rules/typescript.js +++ b/airflow/ui/rules/typescript.js @@ -1804,7 +1804,10 @@ export const typescriptRules = * ``` * @see [@typescript-eslint/strict-boolean-expressions](https://typescript-eslint.io/rules/strict-boolean-expressions/) */ - [`${typescriptNamespace}/strict-boolean-expressions`]: ERROR, + [`${typescriptNamespace}/strict-boolean-expressions`]: [ + ERROR, + { allowNullableBoolean: true }, + ], /** * If you'll use switch, make sure to cover every possible value. diff --git a/airflow/ui/src/App.test.tsx b/airflow/ui/src/App.test.tsx deleted file mode 100644 index 38b90d1c4983c..0000000000000 --- a/airflow/ui/src/App.test.tsx +++ /dev/null @@ -1,124 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import type { QueryObserverSuccessResult } from "@tanstack/react-query"; -import { render } from "@testing-library/react"; -import { afterEach, beforeEach, describe, it, vi } from "vitest"; - -import * as openapiQueriesModule from "openapi/queries"; -import type { DAGCollectionResponse } from "openapi/requests/types.gen"; - -import { App } from "./App"; -import { Wrapper } from "./utils/Wrapper"; - -// The null fields actually have to be null instead of undefined -/* eslint-disable unicorn/no-null */ - -const mockListDags: DAGCollectionResponse = { - dags: [ - { - dag_display_name: "nested_groups", - dag_id: "nested_groups", - default_view: "grid", - description: null, - file_token: - "Ii9maWxlcy9kYWdzL25lc3RlZF90YXNrX2dyb3Vwcy5weSI.G3EkdxmDUDQsVb7AIZww1TSGlFE", - fileloc: "/files/dags/nested_task_groups.py", - has_import_errors: false, - has_task_concurrency_limits: false, - is_active: true, - is_paused: false, - last_expired: null, - last_parsed_time: "2024-08-22T13:50:10.372238+00:00", - last_pickled: null, - max_active_runs: 16, - max_active_tasks: 16, - max_consecutive_failed_dag_runs: 0, - next_dagrun: "2024-08-22T00:00:00+00:00", - next_dagrun_create_after: "2024-08-23T00:00:00+00:00", - next_dagrun_data_interval_end: "2024-08-23T00:00:00+00:00", - next_dagrun_data_interval_start: "2024-08-22T00:00:00+00:00", - owners: ["airflow"], - pickle_id: null, - scheduler_lock: null, - tags: [], - timetable_description: "", - timetable_summary: "", - }, - { - dag_display_name: "simple_bash_operator", - dag_id: "simple_bash_operator", - default_view: "grid", - description: null, - file_token: - "Ii9maWxlcy9kYWdzL3NpbXBsZV9iYXNoX29wZXJhdG9yLnB5Ig.RteaxTC78ceHlgMkfU3lfznlcLI", - fileloc: "/files/dags/simple_bash_operator.py", - has_import_errors: false, - has_task_concurrency_limits: false, - is_active: true, - is_paused: false, - last_expired: null, - last_parsed_time: "2024-08-22T13:50:10.368561+00:00", - last_pickled: null, - max_active_runs: 16, - max_active_tasks: 16, - max_consecutive_failed_dag_runs: 0, - next_dagrun: "2024-08-22T00:00:00+00:00", - next_dagrun_create_after: "2024-08-23T00:00:00+00:00", - next_dagrun_data_interval_end: "2024-08-23T00:00:00+00:00", - next_dagrun_data_interval_start: "2024-08-22T00:00:00+00:00", - owners: ["airflow"], - pickle_id: null, - scheduler_lock: null, - tags: [ - { - dag_id: "dag", - name: "example2", - }, - { - dag_id: "dag", - name: "example", - }, - ], - timetable_description: "At 00:00", - timetable_summary: "sum", - }, - ], - total_entries: 2, -}; - -beforeEach(() => { - const returnValue = { - data: mockListDags, - isLoading: false, - } as QueryObserverSuccessResult; - - vi.spyOn(openapiQueriesModule, "useDagServiceGetDags").mockImplementation( - () => returnValue, - ); -}); - -afterEach(() => { - vi.restoreAllMocks(); -}); - -describe("App", () => { - it("App component should render", () => { - render(, { wrapper: Wrapper }); - }); -}); diff --git 
a/airflow/ui/src/components/DataTable/CardList.tsx b/airflow/ui/src/components/DataTable/CardList.tsx index ddebff81b2495..da141e8eb3d5c 100644 --- a/airflow/ui/src/components/DataTable/CardList.tsx +++ b/airflow/ui/src/components/DataTable/CardList.tsx @@ -17,54 +17,38 @@ * under the License. */ import { Box, SimpleGrid, Skeleton } from "@chakra-ui/react"; -import { - type CoreRow, - flexRender, - type Table as TanStackTable, -} from "@tanstack/react-table"; -import type { SyntheticEvent } from "react"; +import { flexRender, type Table as TanStackTable } from "@tanstack/react-table"; import type { CardDef } from "./types"; type DataTableProps = { readonly cardDef: CardDef; readonly isLoading?: boolean; - readonly onRowClick?: (e: SyntheticEvent, row: CoreRow) => void; readonly table: TanStackTable; }; export const CardList = ({ cardDef, isLoading, - onRowClick, table, -}: DataTableProps) => { - const defaultGridProps = { column: { base: 1 }, spacing: 2 }; - - return ( - - - {table.getRowModel().rows.map((row) => ( - onRowClick(event, row) : undefined} - title={onRowClick ? "View details" : undefined} - > - {Boolean(isLoading) && - (cardDef.meta?.customSkeleton ?? ( - - ))} - {!Boolean(isLoading) && - flexRender(cardDef.card, { row: row.original })} - - ))} - - - ); -}; +}: DataTableProps) => ( + + + {table.getRowModel().rows.map((row) => ( + + {Boolean(isLoading) && + (cardDef.meta?.customSkeleton ?? ( + + ))} + {!Boolean(isLoading) && + flexRender(cardDef.card, { row: row.original })} + + ))} + + +); diff --git a/airflow/ui/src/components/DataTable/DataTable.test.tsx b/airflow/ui/src/components/DataTable/DataTable.test.tsx index 028ba27ce2a94..b6f88458cf72c 100644 --- a/airflow/ui/src/components/DataTable/DataTable.test.tsx +++ b/airflow/ui/src/components/DataTable/DataTable.test.tsx @@ -22,6 +22,8 @@ import "@testing-library/jest-dom"; import { render, screen } from "@testing-library/react"; import { describe, expect, it, vi } from "vitest"; +import { ChakraWrapper } from "src/utils/ChakraWrapper.tsx"; + import { DataTable } from "./DataTable.tsx"; import type { CardDef } from "./types.ts"; @@ -52,6 +54,9 @@ describe("DataTable", () => { onStateChange={onStateChange} total={2} />, + { + wrapper: ChakraWrapper, + }, ); expect(screen.getByText("John Doe")).toBeInTheDocument(); @@ -67,10 +72,12 @@ describe("DataTable", () => { onStateChange={onStateChange} total={2} />, + { + wrapper: ChakraWrapper, + }, ); - expect(screen.getByText("<<")).toBeDisabled(); - expect(screen.getByText("<")).toBeDisabled(); + expect(screen.getByTestId("prev")).toBeDisabled(); }); it("disables next button when on last page", () => { @@ -79,26 +86,32 @@ describe("DataTable", () => { columns={columns} data={data} initialState={{ - pagination: { pageIndex: 1, pageSize: 10 }, + pagination: { pageIndex: 0, pageSize: 10 }, sorting: [], }} onStateChange={onStateChange} total={2} />, + { + wrapper: ChakraWrapper, + }, ); - expect(screen.getByText(">>")).toBeDisabled(); - expect(screen.getByText(">")).toBeDisabled(); + expect(screen.getByTestId("next")).toBeDisabled(); }); it("when isLoading renders skeleton columns", () => { - render(); + render(, { + wrapper: ChakraWrapper, + }); expect(screen.getAllByTestId("skeleton")).toHaveLength(10); }); it("still displays table if mode is card but there is no cardDef", () => { - render(); + render(, { + wrapper: ChakraWrapper, + }); expect(screen.getByText("Name")).toBeInTheDocument(); }); @@ -111,6 +124,9 @@ describe("DataTable", () => { data={data} displayMode="card" />, + { + 
wrapper: ChakraWrapper, + }, ); expect(screen.getByText("My name is John Doe.")).toBeInTheDocument(); @@ -126,6 +142,9 @@ describe("DataTable", () => { isLoading skeletonCount={5} />, + { + wrapper: ChakraWrapper, + }, ); expect(screen.getAllByTestId("skeleton")).toHaveLength(5); diff --git a/airflow/ui/src/components/DataTable/DataTable.tsx b/airflow/ui/src/components/DataTable/DataTable.tsx index 2ed1a4f16ea3a..d59d86bb59435 100644 --- a/airflow/ui/src/components/DataTable/DataTable.tsx +++ b/airflow/ui/src/components/DataTable/DataTable.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { Progress, Text } from "@chakra-ui/react"; +import { HStack, Text } from "@chakra-ui/react"; import { getCoreRowModel, getExpandedRowModel, @@ -30,9 +30,9 @@ import { } from "@tanstack/react-table"; import React, { type ReactNode, useCallback, useRef } from "react"; +import { ProgressBar, Pagination } from "../ui"; import { CardList } from "./CardList"; import { TableList } from "./TableList"; -import { TablePaginator } from "./TablePaginator"; import { createSkeletonMock } from "./skeleton"; import type { CardDef, MetaColumn, TableState } from "./types"; @@ -122,8 +122,7 @@ export const DataTable = ({ return ( <> - ({ {display === "card" && cardDef !== undefined && ( )} - + table.setPageIndex(page.page - 1)} + page={table.getState().pagination.pageIndex + 1} + pageSize={table.getState().pagination.pageSize} + siblingCount={1} + > + + + + + + ); }; diff --git a/airflow/ui/src/components/DataTable/TableList.tsx b/airflow/ui/src/components/DataTable/TableList.tsx index 97b7fd6aed080..0427f44cfb52f 100644 --- a/airflow/ui/src/components/DataTable/TableList.tsx +++ b/airflow/ui/src/components/DataTable/TableList.tsx @@ -16,15 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { - Table as ChakraTable, - TableContainer, - Tbody, - Td, - Th, - Thead, - Tr, -} from "@chakra-ui/react"; +import { Button, Table } from "@chakra-ui/react"; import { flexRender, type Row, @@ -48,78 +40,85 @@ export const TableList = ({ renderSubComponent, table, }: DataTableProps) => ( - - - - {table.getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map( - ({ colSpan, column, getContext, id, isPlaceholder }) => { - const sort = column.getIsSorted(); - const canSort = column.getCanSort(); + + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map( + ({ colSpan, column, getContext, id, isPlaceholder }) => { + const sort = column.getIsSorted(); + const canSort = column.getCanSort(); + const text = flexRender(column.columnDef.header, getContext()); + + let rightIcon; + + if (canSort) { + if (sort === "desc") { + rightIcon = ( + + ); + } else if (sort === "asc") { + rightIcon = ; + } else { + rightIcon = ; + } return ( - {isPlaceholder ? undefined : ( - <>{flexRender(column.columnDef.header, getContext())} + )} - {canSort && sort === false ? ( - - ) : undefined} - {canSort && sort !== false ? ( - sort === "desc" ? ( - - ) : ( - - ) - ) : undefined} - + ); - }, - )} - - ))} - - - {table.getRowModel().rows.map((row) => ( - - - {/* first row is a normal row */} - {row.getVisibleCells().map((cell) => ( - - {flexRender(cell.column.columnDef.cell, cell.getContext())} - - ))} - - {row.getIsExpanded() && ( - - {/* 2nd row is a custom 1 cell row */} - - {renderSubComponent?.({ row })} - - - )} - - ))} - - - + } + + return ( + + {isPlaceholder ? 
undefined : text} + + ); + }, + )} + + ))} + + + {table.getRowModel().rows.map((row) => ( + + + {/* first row is a normal row */} + {row.getVisibleCells().map((cell) => ( + + {flexRender(cell.column.columnDef.cell, cell.getContext())} + + ))} + + {row.getIsExpanded() && ( + + {/* 2nd row is a custom 1 cell row */} + + {renderSubComponent?.({ row })} + + + )} + + ))} + + ); diff --git a/airflow/ui/src/components/DataTable/TablePaginator.tsx b/airflow/ui/src/components/DataTable/TablePaginator.tsx deleted file mode 100644 index ad26faaef631f..0000000000000 --- a/airflow/ui/src/components/DataTable/TablePaginator.tsx +++ /dev/null @@ -1,89 +0,0 @@ -/*! - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -import { Box, Button } from "@chakra-ui/react"; -import type { Table as TanStackTable } from "@tanstack/react-table"; -import { useCallback } from "react"; - -type PaginatorProps = { - readonly table: TanStackTable; -}; - -export const TablePaginator = ({ table }: PaginatorProps) => { - const pageInterval = 3; - const currentPageNumber = table.getState().pagination.pageIndex + 1; - const startPageNumber = Math.max(1, currentPageNumber - pageInterval); - const endPageNumber = Math.min( - table.getPageCount(), - startPageNumber + pageInterval * 2, - ); - const pageNumbers = []; - - const setPageIndex = useCallback( - (index: number) => () => table.setPageIndex(index - 1), - [table], - ); - - for (let index = startPageNumber; index <= endPageNumber; index += 1) { - pageNumbers.push( - , - ); - } - - return ( - - - - - {pageNumbers} - - - - ); -}; diff --git a/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx b/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx index 489fbdc1ce3b6..b442c421322ff 100644 --- a/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx +++ b/airflow/ui/src/components/DataTable/ToggleTableDisplay.tsx @@ -27,28 +27,26 @@ type Props = { }; export const ToggleTableDisplay = ({ display, setDisplay }: Props) => ( - + } - isActive={display === "card"} minWidth={8} onClick={() => setDisplay("card")} - variant="outline" + variant={display === "table" ? "outline" : "solid"} width={8} - /> + > + + } - isActive={display === "table"} minWidth={8} onClick={() => setDisplay("table")} - variant="outline" + variant={display === "card" ? 
"outline" : "solid"} width={8} - /> + > + + ); diff --git a/airflow/ui/src/components/DataTable/index.tsx b/airflow/ui/src/components/DataTable/index.ts similarity index 100% rename from airflow/ui/src/components/DataTable/index.tsx rename to airflow/ui/src/components/DataTable/index.ts diff --git a/airflow/ui/src/components/ErrorAlert.tsx b/airflow/ui/src/components/ErrorAlert.tsx index 3128a2cdec2c4..cfb4048962d37 100644 --- a/airflow/ui/src/components/ErrorAlert.tsx +++ b/airflow/ui/src/components/ErrorAlert.tsx @@ -16,12 +16,14 @@ * specific language governing permissions and limitations * under the License. */ -import { Alert, AlertIcon } from "@chakra-ui/react"; import type { ApiError } from "openapi-gen/requests/core/ApiError"; import type { HTTPExceptionResponse, HTTPValidationError, } from "openapi-gen/requests/types.gen"; +import { FiAlertTriangle } from "react-icons/fi"; + +import { Alert } from "./ui"; type ExpandedApiError = { body: HTTPExceptionResponse | HTTPValidationError; @@ -58,7 +60,7 @@ export const ErrorAlert = ({ error: err }: Props) => { return ( - + {error.message}
{detailMessage} diff --git a/airflow/ui/src/components/QuickFilterButton.tsx b/airflow/ui/src/components/QuickFilterButton.tsx index 24424686a3bd6..e0cb5c23c6e38 100644 --- a/airflow/ui/src/components/QuickFilterButton.tsx +++ b/airflow/ui/src/components/QuickFilterButton.tsx @@ -18,12 +18,20 @@ */ import { Button, type ButtonProps } from "@chakra-ui/react"; -export const QuickFilterButton = ({ children, ...rest }: ButtonProps) => ( +type QuickFilterButtonProps = { + readonly active: boolean; +} & ButtonProps; + +export const QuickFilterButton = ({ + active, + children, + ...rest +}: QuickFilterButtonProps) => ( - + } + startElement={} + > + ); }; diff --git a/airflow/ui/src/components/Time.test.tsx b/airflow/ui/src/components/Time.test.tsx index 9e59d96dd4714..2b6b26d6a2f20 100644 --- a/airflow/ui/src/components/Time.test.tsx +++ b/airflow/ui/src/components/Time.test.tsx @@ -54,11 +54,12 @@ describe("Test Time and TimezoneProvider", () => { }, ); - const samoaTime = screen.getByText(dayjs(now).tz(tz).format(defaultFormat)); + const nowTime = dayjs(now); + const samoaTime = screen.getByText(nowTime.tz(tz).format(defaultFormat)); expect(samoaTime).toBeDefined(); expect(samoaTime.title).toEqual( - dayjs().tz("UTC").format(defaultFormatWithTZ), + nowTime.tz("UTC").format(defaultFormatWithTZ), ); }); }); diff --git a/airflow/ui/src/components/TogglePause.tsx b/airflow/ui/src/components/TogglePause.tsx index 50362187c8ad7..dd2a77163ce3d 100644 --- a/airflow/ui/src/components/TogglePause.tsx +++ b/airflow/ui/src/components/TogglePause.tsx @@ -16,15 +16,17 @@ * specific language governing permissions and limitations * under the License. */ -import { Switch } from "@chakra-ui/react"; import { useQueryClient } from "@tanstack/react-query"; import { useCallback } from "react"; import { + UseDagServiceGetDagDetailsKeyFn, useDagServiceGetDagsKey, useDagServicePatchDag, } from "openapi/queries"; +import { Switch } from "./ui"; + type Props = { readonly dagId: string; readonly isPaused: boolean; @@ -37,6 +39,10 @@ export const TogglePause = ({ dagId, isPaused }: Props) => { await queryClient.invalidateQueries({ queryKey: [useDagServiceGetDagsKey], }); + + await queryClient.invalidateQueries({ + queryKey: UseDagServiceGetDagDetailsKeyFn({ dagId }), + }); }; const { mutate } = useDagServicePatchDag({ @@ -52,5 +58,12 @@ export const TogglePause = ({ dagId, isPaused }: Props) => { }); }, [dagId, isPaused, mutate]); - return ; + return ( + + ); }; diff --git a/airflow/ui/src/components/ui/Alert.tsx b/airflow/ui/src/components/ui/Alert.tsx new file mode 100644 index 0000000000000..10cec224d95a5 --- /dev/null +++ b/airflow/ui/src/components/ui/Alert.tsx @@ -0,0 +1,69 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Alert as ChakraAlert } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +import { CloseButton } from "./CloseButton"; + +export type AlertProps = { + closable?: boolean; + endElement?: React.ReactNode; + icon?: React.ReactElement; + onClose?: () => void; + startElement?: React.ReactNode; + title?: React.ReactNode; +} & Omit; + +export const Alert = forwardRef((props, ref) => { + const { + children, + closable, + endElement, + icon, + onClose, + startElement, + title, + ...rest + } = props; + + return ( + + {startElement ?? {icon}} + {Boolean(children) ? ( + + {title} + {children} + + ) : ( + {title} + )} + {endElement} + {Boolean(closable) ? ( + + ) : undefined} + + ); +}); diff --git a/airflow/ui/src/components/ui/CloseButton.tsx b/airflow/ui/src/components/ui/CloseButton.tsx new file mode 100644 index 0000000000000..7811bbd325e1c --- /dev/null +++ b/airflow/ui/src/components/ui/CloseButton.tsx @@ -0,0 +1,32 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import type { ButtonProps as ChakraCloseButtonProps } from "@chakra-ui/react"; +import { IconButton as ChakraIconButton } from "@chakra-ui/react"; +import { forwardRef } from "react"; +import { LuX } from "react-icons/lu"; + +export type CloseButtonProps = {} & ChakraCloseButtonProps; + +export const CloseButton = forwardRef( + (props, ref) => ( + + {props.children ?? } + + ), +); diff --git a/airflow/ui/src/components/ui/Dialog/CloseTrigger.tsx b/airflow/ui/src/components/ui/Dialog/CloseTrigger.tsx new file mode 100644 index 0000000000000..0ea9beba0b9cf --- /dev/null +++ b/airflow/ui/src/components/ui/Dialog/CloseTrigger.tsx @@ -0,0 +1,39 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
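/*
 * Illustrative usage sketch only, not part of this patch (assumptions: the
 * "src/components/ui" barrel re-exports Alert, and the message text is a
 * placeholder). It shows how the Alert wrapper defined above is meant to be
 * consumed: `status` is forwarded to Chakra's Alert.Root, `icon` fills the
 * indicator, and children render as the description, mirroring the
 * ErrorAlert change earlier in this diff.
 */
import { FiAlertTriangle } from "react-icons/fi";

import { Alert } from "src/components/ui";

export const ExampleErrorBanner = () => (
  <Alert icon={<FiAlertTriangle />} status="error" title="Request failed">
    The API returned an unexpected error.
  </Alert>
);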
+ */ +import { Dialog as ChakraDialog } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +import { CloseButton } from "../CloseButton"; + +export const CloseTrigger = forwardRef< + HTMLButtonElement, + ChakraDialog.CloseTriggerProps +>((props, ref) => ( + + + {props.children} + + +)); diff --git a/airflow/ui/src/components/ui/Dialog/Content.tsx b/airflow/ui/src/components/ui/Dialog/Content.tsx new file mode 100644 index 0000000000000..5502a724e4cf3 --- /dev/null +++ b/airflow/ui/src/components/ui/Dialog/Content.tsx @@ -0,0 +1,49 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Dialog as ChakraDialog, Portal } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +type ContentProps = { + backdrop?: boolean; + portalled?: boolean; + portalRef?: React.RefObject; +} & ChakraDialog.ContentProps; + +export const Content = forwardRef( + (props, ref) => { + const { + backdrop = true, + children, + portalled = true, + portalRef, + ...rest + } = props; + + return ( + + {backdrop ? : undefined} + + + {children} + + + + ); + }, +); diff --git a/airflow/ui/src/layouts/Nav/navButtonProps.ts b/airflow/ui/src/components/ui/Dialog/index.ts similarity index 73% rename from airflow/ui/src/layouts/Nav/navButtonProps.ts rename to airflow/ui/src/components/ui/Dialog/index.ts index 740348bc9676b..a53f681763262 100644 --- a/airflow/ui/src/layouts/Nav/navButtonProps.ts +++ b/airflow/ui/src/components/ui/Dialog/index.ts @@ -16,15 +16,13 @@ * specific language governing permissions and limitations * under the License. */ -import type { ButtonProps } from "@chakra-ui/react"; +import { Dialog as ChakraDialog } from "@chakra-ui/react"; -export const navButtonProps: ButtonProps = { - alignItems: "center", - borderRadius: "none", - flexDir: "column", - height: 16, - transition: "0.2s background-color ease-in-out", - variant: "ghost", - whiteSpace: "wrap", - width: 24, +import { CloseTrigger } from "./CloseTrigger"; +import { Content } from "./Content"; + +export const Dialog = { + ...ChakraDialog, + CloseTrigger, + Content, }; diff --git a/airflow/ui/src/components/ui/InputGroup.tsx b/airflow/ui/src/components/ui/InputGroup.tsx new file mode 100644 index 0000000000000..1835b596994f7 --- /dev/null +++ b/airflow/ui/src/components/ui/InputGroup.tsx @@ -0,0 +1,66 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
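/*
 * Illustrative usage sketch only, not part of this patch (assumptions: the
 * "src/components/ui" barrel re-exports Dialog, and the labels are
 * placeholders). Because the Dialog object above spreads Chakra's Dialog
 * namespace and overrides Content and CloseTrigger, a dialog composes as
 * below, with the backdrop and portal handled inside the custom Content.
 */
import { Button } from "@chakra-ui/react";

import { Dialog } from "src/components/ui";

export const ExampleDialog = () => (
  <Dialog.Root>
    <Dialog.Trigger asChild>
      <Button>Open dialog</Button>
    </Dialog.Trigger>
    <Dialog.Content>
      <Dialog.Header>
        <Dialog.Title>Example title</Dialog.Title>
      </Dialog.Header>
      <Dialog.Body>Example body text.</Dialog.Body>
      <Dialog.CloseTrigger />
    </Dialog.Content>
  </Dialog.Root>
);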
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* eslint-disable @typescript-eslint/strict-boolean-expressions */ + +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import type { BoxProps, InputElementProps } from "@chakra-ui/react"; +import { Group, InputElement } from "@chakra-ui/react"; +import { cloneElement, forwardRef } from "react"; + +export type InputGroupProps = { + children: React.ReactElement; + endElement?: React.ReactNode; + endElementProps?: InputElementProps; + startElement?: React.ReactNode; + startElementProps?: InputElementProps; +} & BoxProps; + +export const InputGroup = forwardRef( + (props, ref) => { + const { + children, + endElement, + endElementProps, + startElement, + startElementProps, + ...rest + } = props; + + return ( + + {startElement ? ( + + {startElement} + + ) : undefined} + {cloneElement(children, { + ...(startElement && { ps: "calc(var(--input-height) - 6px)" }), + ...(endElement && { pe: "calc(var(--input-height) - 6px)" }), + ...children.props, + })} + {endElement ? ( + + {endElement} + + ) : undefined} + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Menu.tsx b/airflow/ui/src/components/ui/Menu.tsx new file mode 100644 index 0000000000000..23bf29a42ae87 --- /dev/null +++ b/airflow/ui/src/components/ui/Menu.tsx @@ -0,0 +1,42 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Menu as ChakraMenu, Portal } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +type MenuContentProps = { + portalled?: boolean; + portalRef?: React.RefObject; +} & ChakraMenu.ContentProps; + +const Content = forwardRef((props, ref) => { + const { portalled = true, portalRef, ...rest } = props; + + return ( + + + + + + ); +}); + +export const Menu = { + ...ChakraMenu, + Content, +}; diff --git a/airflow/ui/src/components/ui/Pagination/Ellipsis.tsx b/airflow/ui/src/components/ui/Pagination/Ellipsis.tsx new file mode 100644 index 0000000000000..217353304206c --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/Ellipsis.tsx @@ -0,0 +1,40 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
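/*
 * Illustrative usage sketch only, not part of this patch (assumptions: the
 * "src/components/ui" barrel re-exports InputGroup, and the icon choice and
 * placeholder text are invented). It shows the intended composition: a single
 * Input child plus a `startElement`, which the wrapper clones into the child
 * with extra start padding, as the search field elsewhere in this diff does.
 */
import { Input } from "@chakra-ui/react";
import { FiSearch } from "react-icons/fi";

import { InputGroup } from "src/components/ui";

export const ExampleSearchInput = () => (
  <InputGroup startElement={<FiSearch />}>
    <Input placeholder="Search DAGs" />
  </InputGroup>
);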
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Button, Pagination as ChakraPagination } from "@chakra-ui/react"; +import { forwardRef } from "react"; +import { HiMiniEllipsisHorizontal } from "react-icons/hi2"; + +import { paginationContext } from "./context"; + +const [, useRootProps] = paginationContext; + +export const Ellipsis = forwardRef< + HTMLDivElement, + ChakraPagination.EllipsisProps +>((props, ref) => { + const { size, variantMap } = useRootProps(); + + return ( + + + + ); +}); diff --git a/airflow/ui/src/components/ui/Pagination/Item.tsx b/airflow/ui/src/components/ui/Pagination/Item.tsx new file mode 100644 index 0000000000000..d38a1b03b0440 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/Item.tsx @@ -0,0 +1,54 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import type { ButtonProps } from "@chakra-ui/react"; +import { + Button, + Pagination as ChakraPagination, + usePaginationContext, +} from "@chakra-ui/react"; +import { forwardRef } from "react"; + +import { paginationContext } from "./context"; + +type PaginationVariant = "outline" | "solid" | "subtle"; + +const [, useRootProps] = paginationContext; + +export type PaginationRootProps = { + size?: ButtonProps["size"]; + variant?: PaginationVariant; +} & Omit; + +export const Item = forwardRef( + (props, ref) => { + const { page } = usePaginationContext(); + const { size, variantMap } = useRootProps(); + + const current = page === props.value; + const variant = current ? variantMap.current : variantMap.default; + + return ( + + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Pagination/Items.tsx b/airflow/ui/src/components/ui/Pagination/Items.tsx new file mode 100644 index 0000000000000..06b9c155bf9c2 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/Items.tsx @@ -0,0 +1,36 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Pagination as ChakraPagination } from "@chakra-ui/react"; + +import { Ellipsis } from "./Ellipsis"; +import { Item } from "./Item"; + +export const Items = (props: React.HTMLAttributes) => ( + + {({ pages }) => + pages.map((page, index) => + page.type === "ellipsis" ? ( + + ) : ( + + ), + ) + } + +); diff --git a/airflow/ui/src/components/ui/Pagination/NextTrigger.tsx b/airflow/ui/src/components/ui/Pagination/NextTrigger.tsx new file mode 100644 index 0000000000000..079d371c99c99 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/NextTrigger.tsx @@ -0,0 +1,40 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Pagination as ChakraPagination, IconButton } from "@chakra-ui/react"; +import { forwardRef } from "react"; +import { HiChevronRight } from "react-icons/hi2"; + +import { paginationContext } from "./context"; + +const [, useRootProps] = paginationContext; + +export const NextTrigger = forwardRef< + HTMLButtonElement, + ChakraPagination.NextTriggerProps +>((props, ref) => { + const { size, variantMap } = useRootProps(); + + return ( + + + + + + ); +}); diff --git a/airflow/ui/src/components/ui/Pagination/PageText.tsx b/airflow/ui/src/components/ui/Pagination/PageText.tsx new file mode 100644 index 0000000000000..017e34d0be836 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/PageText.tsx @@ -0,0 +1,48 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { TextProps } from "@chakra-ui/react"; +import { Text, usePaginationContext } from "@chakra-ui/react"; +import { forwardRef, useMemo } from "react"; + +type PageTextProps = { + format?: "compact" | "long" | "short"; +} & TextProps; + +export const PageText = forwardRef( + (props, ref) => { + const { format = "compact", ...rest } = props; + const { count, page, pageRange, pages } = usePaginationContext(); + const content = useMemo(() => { + if (format === "short") { + return `${page} / ${pages.length}`; + } + if (format === "compact") { + return `${page} of ${pages.length}`; + } + + return `${pageRange.start + 1} - ${pageRange.end} of ${count}`; + }, [format, page, pages.length, pageRange, count]); + + return ( + + {content} + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Pagination/PrevTrigger.tsx b/airflow/ui/src/components/ui/Pagination/PrevTrigger.tsx new file mode 100644 index 0000000000000..49e647838f5f7 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/PrevTrigger.tsx @@ -0,0 +1,40 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Pagination as ChakraPagination, IconButton } from "@chakra-ui/react"; +import { forwardRef } from "react"; +import { HiChevronLeft } from "react-icons/hi2"; + +import { paginationContext } from "./context"; + +const [, useRootProps] = paginationContext; + +export const PrevTrigger = forwardRef< + HTMLButtonElement, + ChakraPagination.PrevTriggerProps +>((props, ref) => { + const { size, variantMap } = useRootProps(); + + return ( + + + + + + ); +}); diff --git a/airflow/ui/src/components/ui/Pagination/Root.tsx b/airflow/ui/src/components/ui/Pagination/Root.tsx new file mode 100644 index 0000000000000..a1a917e408a85 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/Root.tsx @@ -0,0 +1,56 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { ButtonProps } from "@chakra-ui/react"; +import { Pagination as ChakraPagination } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +import { paginationContext } from "./context"; + +type ButtonVariantMap = { + current: ButtonProps["variant"]; + default: ButtonProps["variant"]; + ellipsis: ButtonProps["variant"]; +}; + +type PaginationVariant = "outline" | "solid" | "subtle"; + +const [RootPropsProvider] = paginationContext; + +export type PaginationRootProps = { + size?: ButtonProps["size"]; + variant?: PaginationVariant; +} & Omit; + +const VARIANT_MAP: Record = { + outline: { current: "outline", default: "ghost", ellipsis: "plain" }, + solid: { current: "solid", default: "outline", ellipsis: "outline" }, + subtle: { current: "subtle", default: "ghost", ellipsis: "plain" }, +}; + +export const Root = forwardRef( + (props, ref) => { + const { size = "sm", variant = "outline", ...rest } = props; + + return ( + + + + ); + }, +); diff --git a/airflow/ui/prettier.config.js b/airflow/ui/src/components/ui/Pagination/context.ts similarity index 60% rename from airflow/ui/prettier.config.js rename to airflow/ui/src/components/ui/Pagination/context.ts index 7846b10ed0f7c..9762cee87cddf 100644 --- a/airflow/ui/prettier.config.js +++ b/airflow/ui/src/components/ui/Pagination/context.ts @@ -16,24 +16,19 @@ * specific language governing permissions and limitations * under the License. */ +import { type ButtonProps, createContext } from "@chakra-ui/react"; -/** - * @import { Config } from "prettier"; - * @import { PluginConfig } from "@trivago/prettier-plugin-sort-imports"; - */ +type ButtonVariantMap = { + current: ButtonProps["variant"]; + default: ButtonProps["variant"]; + ellipsis: ButtonProps["variant"]; +}; -/** - * Prettier configuration. - */ -export default /** @type {const} @satisfies {Config & PluginConfig} */ ({ - endOfLine: "lf", - importOrder: ["", "^(src|openapi)/", "^[./]"], - importOrderSeparation: true, - jsxSingleQuote: false, - plugins: ["@trivago/prettier-plugin-sort-imports"], - printWidth: 80, - singleQuote: false, - tabWidth: 2, - trailingComma: "all", - useTabs: false, +type ButtonVariantContext = { + size: ButtonProps["size"]; + variantMap: ButtonVariantMap; +}; + +export const paginationContext = createContext({ + name: "RootPropsProvider", }); diff --git a/airflow/ui/src/components/ui/Pagination/index.ts b/airflow/ui/src/components/ui/Pagination/index.ts new file mode 100644 index 0000000000000..c7a983ec92625 --- /dev/null +++ b/airflow/ui/src/components/ui/Pagination/index.ts @@ -0,0 +1,35 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Ellipsis } from "./Ellipsis"; +import { Item } from "./Item"; +import { Items } from "./Items"; +import { NextTrigger } from "./NextTrigger"; +import { PageText } from "./PageText"; +import { PrevTrigger } from "./PrevTrigger"; +import { Root } from "./Root"; + +export const Pagination = { + Ellipsis, + Item, + Items, + NextTrigger, + PageText, + PrevTrigger, + Root, +}; diff --git a/airflow/ui/src/App.tsx b/airflow/ui/src/components/ui/ProgressBar.tsx similarity index 66% rename from airflow/ui/src/App.tsx rename to airflow/ui/src/components/ui/ProgressBar.tsx index b8c73c35c7fc3..c37fbb1990425 100644 --- a/airflow/ui/src/App.tsx +++ b/airflow/ui/src/components/ui/ProgressBar.tsx @@ -16,18 +16,15 @@ * specific language governing permissions and limitations * under the License. */ -import { Route, Routes } from "react-router-dom"; +import { Progress as ChakraProgress } from "@chakra-ui/react"; +import { forwardRef } from "react"; -import { DagsList } from "src/pages/DagsList"; -import { Dashboard } from "src/pages/Dashboard"; - -import { BaseLayout } from "./layouts/BaseLayout"; - -export const App = () => ( - - } path="/"> - } index /> - } path="dags" /> - - +export const ProgressBar = forwardRef( + (props, ref) => ( + + + + + + ), ); diff --git a/airflow/ui/src/components/ui/Select/Content.tsx b/airflow/ui/src/components/ui/Select/Content.tsx new file mode 100644 index 0000000000000..4fbadf7d6df70 --- /dev/null +++ b/airflow/ui/src/components/ui/Select/Content.tsx @@ -0,0 +1,39 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Select as ChakraSelect, Portal } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +type ContentProps = { + portalled?: boolean; + portalRef?: React.RefObject; +} & ChakraSelect.ContentProps; + +export const Content = forwardRef( + (props, ref) => { + const { portalled = true, portalRef, ...rest } = props; + + return ( + + + + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Select/Item.tsx b/airflow/ui/src/components/ui/Select/Item.tsx new file mode 100644 index 0000000000000..c5ec6de755249 --- /dev/null +++ b/airflow/ui/src/components/ui/Select/Item.tsx @@ -0,0 +1,36 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
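The Pagination namespace assembled above is meant to be composed per page rather than consumed as one component. A minimal usage sketch follows, assuming the wrappers compose like the Chakra UI v3 pagination primitives they wrap; the DagsPager name, the count/pageSize values, and the local setPage handler are illustrative placeholders, not part of this change.

import { useState } from "react";

import { Pagination } from "src/components/ui";

// Illustrative component: a real page would likely keep the page number in URL search params.
export const DagsPager = ({ total }: { total: number }) => {
  const [page, setPage] = useState(1);

  return (
    <Pagination.Root
      count={total}
      onPageChange={(event) => setPage(event.page)}
      page={page}
      pageSize={25}
    >
      <Pagination.PrevTrigger />
      <Pagination.Items />
      <Pagination.NextTrigger />
    </Pagination.Root>
  );
};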
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +import { Select as ChakraSelect } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +export const Item = forwardRef( + (props, ref) => { + const { children, item, ...rest } = props; + + return ( + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + + {children} + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Select/ItemGroup.tsx b/airflow/ui/src/components/ui/Select/ItemGroup.tsx new file mode 100644 index 0000000000000..e68a10634441f --- /dev/null +++ b/airflow/ui/src/components/ui/Select/ItemGroup.tsx @@ -0,0 +1,37 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Select as ChakraSelect } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +type ItemGroupProps = { + label: React.ReactNode; +} & ChakraSelect.ItemGroupProps; + +export const ItemGroup = forwardRef( + (props, ref) => { + const { children, label, ...rest } = props; + + return ( + + {label} + {children} + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Select/Root.tsx b/airflow/ui/src/components/ui/Select/Root.tsx new file mode 100644 index 0000000000000..7f9ed4b29ca4e --- /dev/null +++ b/airflow/ui/src/components/ui/Select/Root.tsx @@ -0,0 +1,30 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Select as ChakraSelect } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +export const Root = forwardRef( + (props, ref) => ( + + ), +); diff --git a/airflow/ui/src/components/ui/Select/Trigger.tsx b/airflow/ui/src/components/ui/Select/Trigger.tsx new file mode 100644 index 0000000000000..86425135ebf44 --- /dev/null +++ b/airflow/ui/src/components/ui/Select/Trigger.tsx @@ -0,0 +1,52 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Select as ChakraSelect } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +import { CloseButton } from "../CloseButton"; + +type TriggerProps = { + clearable?: boolean; +} & ChakraSelect.ControlProps; + +export const Trigger = forwardRef( + (props, ref) => { + const { children, clearable, ...rest } = props; + + return ( + + {children} + + {clearable ? ( + + + + ) : undefined} + + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Select/ValueText.tsx b/airflow/ui/src/components/ui/Select/ValueText.tsx new file mode 100644 index 0000000000000..0cdec1b165167 --- /dev/null +++ b/airflow/ui/src/components/ui/Select/ValueText.tsx @@ -0,0 +1,53 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import type { CollectionItem } from "@chakra-ui/react"; +import { Select as ChakraSelect } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +type ValueTextProps = { + children?: (items: Array) => React.ReactNode; +} & Omit; + +export const ValueText = forwardRef( + (props, ref) => { + const { children, ...rest } = props; + + return ( + + + {(select) => { + const items = select.selectedItems; + + if (items.length === 0) { + return props.placeholder; + } + if (children) { + return children(items); + } + if (items.length === 1) { + return select.collection.stringifyItem(items[0]); + } + + return `${items.length} selected`; + }} + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Select/index.ts b/airflow/ui/src/components/ui/Select/index.ts new file mode 100644 index 0000000000000..c3745338b4240 --- /dev/null +++ b/airflow/ui/src/components/ui/Select/index.ts @@ -0,0 +1,36 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Select as ChakraSelect } from "@chakra-ui/react"; + +import { Content } from "./Content"; +import { Item } from "./Item"; +import { ItemGroup } from "./ItemGroup"; +import { Root } from "./Root"; +import { Trigger } from "./Trigger"; +import { ValueText } from "./ValueText"; + +export const Select = { + ...ChakraSelect, + Content, + Item, + ItemGroup, + Root, + Trigger, + ValueText, +}; diff --git a/airflow/ui/src/components/ui/Switch.tsx b/airflow/ui/src/components/ui/Switch.tsx new file mode 100644 index 0000000000000..db5212137127e --- /dev/null +++ b/airflow/ui/src/components/ui/Switch.tsx @@ -0,0 +1,57 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
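With Content, Item, Root, Trigger, and ValueText re-exported as a single Select namespace, a consumer builds the options with Chakra's createListCollection and composes the pieces around it. A minimal sketch, assuming the same composition as the upstream Chakra UI v3 select snippet; the TimezoneSelect component, its onChange prop, and the hard-coded timezone items are illustrative only.

import { createListCollection } from "@chakra-ui/react";

import { Select } from "src/components/ui";

// Illustrative collection; real options would come from the API or a constants module.
const timezones = createListCollection({
  items: [
    { label: "UTC", value: "UTC" },
    { label: "Europe/Amsterdam", value: "Europe/Amsterdam" },
  ],
});

export const TimezoneSelect = ({ onChange }: { onChange: (tz: string) => void }) => (
  <Select.Root
    collection={timezones}
    onValueChange={(event) => onChange(event.value[0] ?? "UTC")}
    size="sm"
  >
    <Select.Trigger clearable>
      <Select.ValueText placeholder="Select timezone" />
    </Select.Trigger>
    <Select.Content>
      {timezones.items.map((tz) => (
        <Select.Item item={tz} key={tz.value}>
          {tz.label}
        </Select.Item>
      ))}
    </Select.Content>
  </Select.Root>
);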
+ */ +import { Switch as ChakraSwitch } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +export type SwitchProps = { + inputProps?: React.InputHTMLAttributes; + rootRef?: React.Ref; + thumbLabel?: { off: React.ReactNode; on: React.ReactNode }; + trackLabel?: { off: React.ReactNode; on: React.ReactNode }; +} & ChakraSwitch.RootProps; + +export const Switch = forwardRef( + (props, ref) => { + const { children, inputProps, rootRef, thumbLabel, trackLabel, ...rest } = + props; + + return ( + + + + + {thumbLabel ? ( + + {thumbLabel.on} + + ) : undefined} + + {trackLabel ? ( + + {trackLabel.on} + + ) : undefined} + + {Boolean(children) && ( + {children} + )} + + ); + }, +); diff --git a/airflow/ui/src/components/ui/Tag.tsx b/airflow/ui/src/components/ui/Tag.tsx new file mode 100644 index 0000000000000..a5a3da6274491 --- /dev/null +++ b/airflow/ui/src/components/ui/Tag.tsx @@ -0,0 +1,55 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { Tag as ChakraTag } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +export type TagProps = { + closable?: boolean; + endElement?: React.ReactNode; + onClose?: VoidFunction; + startElement?: React.ReactNode; +} & ChakraTag.RootProps; + +export const Tag = forwardRef((props, ref) => { + const { + children, + onClose, + closable = Boolean(onClose), + endElement, + startElement, + ...rest + } = props; + + return ( + + {Boolean(startElement) ? ( + {startElement} + ) : undefined} + {children} + {Boolean(endElement) ? ( + {endElement} + ) : undefined} + {Boolean(closable) ? ( + + + + ) : undefined} + + ); +}); diff --git a/airflow/ui/src/components/ui/Tooltip.tsx b/airflow/ui/src/components/ui/Tooltip.tsx new file mode 100644 index 0000000000000..2edb43c8b12f1 --- /dev/null +++ b/airflow/ui/src/components/ui/Tooltip.tsx @@ -0,0 +1,66 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { Tooltip as ChakraTooltip, Portal } from "@chakra-ui/react"; +import { forwardRef } from "react"; + +export type TooltipProps = { + content: React.ReactNode; + contentProps?: ChakraTooltip.ContentProps; + disabled?: boolean; + portalled?: boolean; + portalRef?: React.RefObject; + showArrow?: boolean; +} & ChakraTooltip.RootProps; + +export const Tooltip = forwardRef( + (props, ref) => { + const { + children, + content, + contentProps, + disabled, + portalled, + portalRef, + showArrow, + ...rest + } = props; + + if (disabled) { + return children; + } + + return ( + + {children} + + + + {showArrow ? ( + + + + ) : undefined} + {content} + + + + + ); + }, +); diff --git a/airflow/ui/src/components/ui/index.ts b/airflow/ui/src/components/ui/index.ts new file mode 100644 index 0000000000000..1c0922b06e8a2 --- /dev/null +++ b/airflow/ui/src/components/ui/index.ts @@ -0,0 +1,32 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export * from "./Dialog"; +export * from "./Pagination"; +export * from "./Select"; +export * from "./Alert"; +export * from "./CloseButton"; +export * from "./InputGroup"; + +export * from "./Switch"; +export * from "./Tag"; +export * from "./Tooltip"; + +export * from "./ProgressBar"; +export * from "./Menu"; diff --git a/airflow/ui/src/constants/searchParams.ts b/airflow/ui/src/constants/searchParams.ts index 893a4461bffe7..8ca5e0b4f19a9 100644 --- a/airflow/ui/src/constants/searchParams.ts +++ b/airflow/ui/src/constants/searchParams.ts @@ -23,6 +23,7 @@ export enum SearchParamsKeys { OFFSET = "offset", PAUSED = "paused", SORT = "sort", + TAGS = "tags", } export type SearchParamsKeysType = Record< diff --git a/airflow/ui/src/context/colorMode/ColorModeProvider.tsx b/airflow/ui/src/context/colorMode/ColorModeProvider.tsx new file mode 100644 index 0000000000000..4839856c8ee24 --- /dev/null +++ b/airflow/ui/src/context/colorMode/ColorModeProvider.tsx @@ -0,0 +1,24 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+import { ThemeProvider } from "next-themes";
+import type { ThemeProviderProps } from "next-themes/dist/types";
+
+export const ColorModeProvider = (props: ThemeProviderProps) => (
+
+);
diff --git a/airflow/ui/src/context/colorMode/index.ts b/airflow/ui/src/context/colorMode/index.ts
new file mode 100644
index 0000000000000..30edc0a3216e3
--- /dev/null
+++ b/airflow/ui/src/context/colorMode/index.ts
@@ -0,0 +1,21 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export * from "./ColorModeProvider";
+export * from "./useColorMode";
diff --git a/airflow/ui/src/context/colorMode/useColorMode.tsx b/airflow/ui/src/context/colorMode/useColorMode.tsx
new file mode 100644
index 0000000000000..5c9ea1076e9a1
--- /dev/null
+++ b/airflow/ui/src/context/colorMode/useColorMode.tsx
@@ -0,0 +1,32 @@
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { useTheme } from "next-themes";
+
+export const useColorMode = () => {
+  const { resolvedTheme, setTheme } = useTheme();
+  const toggleColorMode = () => {
+    setTheme(resolvedTheme === "light" ? "dark" : "light");
+  };
+
+  return {
+    colorMode: resolvedTheme,
+    setColorMode: setTheme,
+    toggleColorMode,
+  };
+};
diff --git a/airflow/ui/src/context/timezone/TimezoneProvider.tsx b/airflow/ui/src/context/timezone/TimezoneProvider.tsx
index dfe40f6976706..74c0c2462b390
--- a/airflow/ui/src/context/timezone/TimezoneProvider.tsx
+++ b/airflow/ui/src/context/timezone/TimezoneProvider.tsx
@@ -16,12 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import {
-  createContext,
-  useState,
-  useMemo,
-  type PropsWithChildren,
-} from "react";
+import { createContext, useMemo, type PropsWithChildren } from "react";
+import { useLocalStorage } from "usehooks-ts";
 
 export type TimezoneContextType = {
   selectedTimezone: string;
@@ -35,20 +31,14 @@ export const TimezoneContext = createContext(
 const TIMEZONE_KEY = "timezone";
 
 export const TimezoneProvider = ({ children }: PropsWithChildren) => {
-  const [selectedTimezone, setSelectedTimezone] = useState(() => {
-    const timezone = localStorage.getItem(TIMEZONE_KEY);
-
-    return timezone ?? "UTC";
-  });
-
-  const selectTimezone = (tz: string) => {
-    localStorage.setItem(TIMEZONE_KEY, tz);
-    setSelectedTimezone(tz);
-  };
+  const [selectedTimezone, setSelectedTimezone] = useLocalStorage(
+    TIMEZONE_KEY,
+    "UTC",
+  );
 
   const value = useMemo(
-    () => ({ selectedTimezone, setSelectedTimezone: selectTimezone }),
-    [selectedTimezone],
+    () => ({ selectedTimezone, setSelectedTimezone }),
+    [selectedTimezone, setSelectedTimezone],
   );
 
   return (
diff --git a/airflow/ui/src/layouts/BaseLayout.tsx b/airflow/ui/src/layouts/BaseLayout.tsx
index 4aa7a74de6fc1..848b4ea88f594
--- a/airflow/ui/src/layouts/BaseLayout.tsx
+++ b/airflow/ui/src/layouts/BaseLayout.tsx
@@ -17,15 +17,16 @@
  * under the License.
  */
 import { Box } from "@chakra-ui/react";
+import type { PropsWithChildren } from "react";
 import { Outlet } from "react-router-dom";
 
 import { Nav } from "./Nav";
 
-export const BaseLayout = () => (
+export const BaseLayout = ({ children }: PropsWithChildren) => (
   <>