From 3ba670f6d7aa32ce8879066fdca6e0fb86a88d25 Mon Sep 17 00:00:00 2001 From: Leo Fang Date: Mon, 6 Jan 2025 16:55:35 +0000 Subject: [PATCH 1/5] upload artifacts to GHA Cache when merged to main --- .github/workflows/build-and-test.yml | 190 ++++++++++++++++++++++++--- 1 file changed, 171 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index bd7363c8..343a88fc 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -42,6 +42,9 @@ jobs: # (matrix.host-platform == 'win-64' && 'windows-amd64-cpu8') }} outputs: BUILD_CTK_VER: ${{ steps.pass_env.outputs.CUDA_VERSION }} + defaults: + run: + shell: bash --noprofile --norc -xeuo pipefail {0} steps: - name: Checkout ${{ github.event.repository.name }} uses: actions/checkout@v4 @@ -62,7 +65,6 @@ jobs: uses: ilammy/msvc-dev-cmd@v1 - name: Set environment variables - shell: bash --noprofile --norc -xeuo pipefail {0} run: | PYTHON_VERSION_FORMATTED=$(echo '${{ matrix.python-version }}' | tr -d '.') if [[ "${{ matrix.host-platform }}" == linux* ]]; then @@ -75,14 +77,60 @@ jobs: fi echo "PARALLEL_LEVEL=$(nproc)" >> $GITHUB_ENV - echo "CUDA_CORE_ARTIFACT_NAME=cuda-core-python${PYTHON_VERSION_FORMATTED}-${{ matrix.host-platform }}-${{ github.sha }}" >> $GITHUB_ENV + CUDA_CORE_ARTIFACT_BASENAME="cuda-core-python${PYTHON_VERSION_FORMATTED}-${{ matrix.host-platform }}" + echo "CUDA_CORE_ARTIFACT_BASENAME=${CUDA_CORE_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_CORE_ARTIFACT_NAME=${CUDA_CORE_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_core/dist")" >> $GITHUB_ENV - echo "CUDA_BINDINGS_ARTIFACT_NAME=cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ matrix.cuda-version }}-${{ matrix.host-platform }}-${{ github.sha }}" >> $GITHUB_ENV + CUDA_BINDINGS_ARTIFACT_BASENAME="cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ matrix.cuda-version }}-${{ matrix.host-platform }}" + echo "CUDA_BINDINGS_ARTIFACT_BASENAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_BINDINGS_ARTIFACT_NAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV echo "CIBW_BUILD=${CIBW_BUILD}" >> $GITHUB_ENV - + + # When the CI is run due to merging to main, we want it to populate GHA Cache not Artifacts, + # so that CI workflows running on every branch have a fallback to use. + if [[ "${{ github.ref_name}}" == main ]]; then + echo "USE_CACHE=1" >> $GITHUB_ENV + else + echo "USE_CACHE=0" >> $GITHUB_ENV + fi + + # TODO: revert me before merging; this is to test the cache restore in the PR + echo "USE_CACHE=1" >> $GITHUB_ENV + + - name: Install dependencies + if: ${{ env.USE_CACHE == '1' }} + run: | + # For GHA Cache + dependencies=(zstd) + dependent_exes=(zstd) + + not_found=0 + for dep in ${dependent_exes[@]}; do + if ! (command -v curl 2>&1 >/dev/null); then + not_found=1 + break + fi + done + if [[ $not_found == 0 ]]; then + echo "All dependencies are found. Do nothing." + exit 0 + fi + if ! (command -v sudo 2>&1 >/dev/null); then + if [[ $EUID == 0 ]]; then + alias SUDO="" + else + echo "The following oprations require root access." 
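+              # Neither root nor sudo is available here, so zstd cannot be apt-installed for the GHA Cache steps; give up.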
+ exit 1 + fi + else + alias SUDO="sudo" + fi + shopt -s expand_aliases + SUDO apt update + SUDO apt install -y ${dependencies[@]} + - name: Dump environment - shell: bash --noprofile --norc -xeuo pipefail {0} run: | env @@ -97,7 +145,6 @@ jobs: output-dir: ${{ env.CUDA_CORE_ARTIFACTS_DIR }} - name: List the cuda.core artifacts directory - shell: bash --noprofile --norc -xeuo pipefail {0} run: | if [[ "${{ matrix.host-platform }}" == win* ]]; then export CHOWN=chown @@ -108,12 +155,12 @@ jobs: ls -lahR ${{ env.CUDA_CORE_ARTIFACTS_DIR }} - name: Check cuda.core wheel - shell: bash --noprofile --norc -xeuo pipefail {0} run: | pip install twine twine check ${{ env.CUDA_CORE_ARTIFACTS_DIR }}/*.whl - name: Upload cuda.core build artifacts + if: ${{ env.USE_CACHE == '0' }} uses: actions/upload-artifact@v4 with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} @@ -121,6 +168,29 @@ jobs: if-no-files-found: error overwrite: 'true' + - name: Prepare cuda.core cache + if: ${{ env.USE_CACHE == '1' }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if [[ "${{ env.USE_CACHE }}" == 1 ]]; then + # this file is uploaded to GHA Cache + tar -c -f "${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" -C "${{ env.CUDA_CORE_ARTIFACTS_DIR }}" . + du -h "${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" + # check if the previous runs from the same PR have populated the cache, if so need to clean it up + CACHE_KEY=${{ env.CUDA_CORE_ARTIFACT_NAME }} + if [ $(gh cache list | grep $CACHE_KEY | wc -l) == "1" ]; then + gh cache delete $CACHE_KEY + fi + fi + + - name: Cache cuda.core build artifacts + if: ${{ env.USE_CACHE == '1' }} + uses: actions/cache/save@v4 + with: + key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} + path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz + - name: Set up mini CTK uses: ./.github/actions/fetch_ctk continue-on-error: false @@ -146,7 +216,6 @@ jobs: output-dir: ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }} - name: List the cuda.bindings artifacts directory - shell: bash --noprofile --norc -xeuo pipefail {0} run: | if [[ "${{ matrix.host-platform }}" == win* ]]; then export CHOWN=chown @@ -158,11 +227,27 @@ jobs: # TODO: enable this after NVIDIA/cuda-python#297 is resolved # - name: Check cuda.bindings wheel - # shell: bash --noprofile --norc -xeuo pipefail {0} # run: | # twine check ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}/*.whl + - name: Prepare cuda.bindings cache + if: ${{ env.USE_CACHE == '1' }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if [[ "${{ env.USE_CACHE }}" == 1 ]]; then + # this file is uploaded to GHA Cache + tar -c -f "${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" -C "${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" . 
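+            # The GHA cache backend prefers zstd compression when the binary is present, hence the zstd install earlier in this job.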
+ du -h "${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" + # check if the previous runs from the same PR have populated the cache, if so need to clean it up + CACHE_KEY=${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} + if [ $(gh cache list | grep $CACHE_KEY | wc -l) == "1" ]; then + gh cache delete $CACHE_KEY + fi + fi + - name: Upload cuda.bindings build artifacts + if: ${{ env.USE_CACHE == '0' }} uses: actions/upload-artifact@v4 with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} @@ -170,6 +255,13 @@ jobs: if-no-files-found: error overwrite: 'true' + - name: Cache cuda.bindings build artifacts + if: ${{ env.USE_CACHE == '1' }} + uses: actions/cache/save@v4 + with: + key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} + path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz + - name: Pass environment variables to the next runner id: pass_env run: | @@ -205,7 +297,7 @@ jobs: runner: H100 name: Test (${{ matrix.host-platform }}, Python ${{ matrix.python-version }}, CUDA ${{ matrix.cuda-version }}, Runner ${{ matrix.runner }}) # The build stage could fail but we want the CI to keep moving. - if: ${{ github.repository_owner == 'nvidia' && always() }} + if: ${{ github.repository_owner == 'nvidia' && !cancelled() }} permissions: id-token: write # This is required for configure-aws-credentials contents: read # This is required for actions/checkout @@ -221,9 +313,11 @@ jobs: NVIDIA_VISIBLE_DEVICES: ${{ env.NVIDIA_VISIBLE_DEVICES }} needs: - build + defaults: + run: + shell: bash --noprofile --norc -xeuo pipefail {0} steps: - name: Ensure GPU is working - shell: bash --noprofile --norc -xeuo pipefail {0} run: nvidia-smi - name: Checkout ${{ github.event.repository.name }} @@ -232,7 +326,6 @@ jobs: fetch-depth: 0 - name: Set environment variables - shell: bash --noprofile --norc -xeuo pipefail {0} run: | PYTHON_VERSION_FORMATTED=$(echo '${{ matrix.python-version }}' | tr -d '.') if [[ "${{ matrix.host-platform }}" == linux* ]]; then @@ -251,32 +344,93 @@ jobs: fi # make outputs from the previous job as env vars - echo "CUDA_CORE_ARTIFACT_NAME=cuda-core-python${PYTHON_VERSION_FORMATTED}-${{ matrix.host-platform }}-${{ github.sha }}" >> $GITHUB_ENV + CUDA_CORE_ARTIFACT_BASENAME="cuda-core-python${PYTHON_VERSION_FORMATTED}-${{ matrix.host-platform }}" + echo "CUDA_CORE_ARTIFACT_BASENAME=${CUDA_CORE_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_CORE_ARTIFACT_NAME=${CUDA_CORE_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_core/dist")" >> $GITHUB_ENV - echo "CUDA_BINDINGS_ARTIFACT_NAME=cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ needs.build.outputs.BUILD_CTK_VER }}-${{ matrix.host-platform }}-${{ github.sha }}" >> $GITHUB_ENV + CUDA_BINDINGS_ARTIFACT_BASENAME="cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ needs.build.outputs.BUILD_CTK_VER }}-${{ matrix.host-platform }}" + echo "CUDA_BINDINGS_ARTIFACT_BASENAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_BINDINGS_ARTIFACT_NAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV echo "SKIP_CUDA_BINDINGS_TEST=${SKIP_CUDA_BINDINGS_TEST}" >> $GITHUB_ENV + # We'll try GHA Artifacts first, and then fall back to GHA Cache - name: Download cuda.bindings build artifacts + id: cuda-bindings-download uses: actions/download-artifact@v4 + continue-on-error: true with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} path: ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }} + - name: 
Restore cuda.bindings cache + if: ${{ steps.cuda-bindings-download.outcome == 'failure' }} + id: cuda-bindings-cache + uses: actions/cache/restore@v4 + with: + key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} + path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz + restore-keys: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }} + fail-on-cache-miss: true + + - name: Report cache restore status + if: ${{ steps.cuda-bindings-cache.outcome != 'skipped' }} + run: | + if [[ "${{ steps.cuda-bindings-cache.outputs.cache-hit }}" == true ]]; then + echo "cache is found" + else + echo "cache is not found" + exit 1 + fi + CACHE_DIR="${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" + CACHE_ARCHIVE="${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" + ls -l $CACHE_ARCHIVE + mkdir -p $CACHE_DIR + du -h $CACHE_ARCHIVE && + tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && + rm -f $CACHE_ARCHIVE || exit 1 + - name: Display structure of downloaded cuda.bindings artifacts - shell: bash --noprofile --norc -xeuo pipefail {0} run: | pwd ls -lahR $CUDA_BINDINGS_ARTIFACTS_DIR - name: Download cuda.core build artifacts + id: cuda-core-download uses: actions/download-artifact@v4 + continue-on-error: true with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} path: ${{ env.CUDA_CORE_ARTIFACTS_DIR }} + - name: Restore cuda.core cache + if: ${{ steps.cuda-core-download.outcome == 'failure' }} + id: cuda-core-cache + uses: actions/cache/restore@v4 + with: + key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} + path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz + restore-keys: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }} + fail-on-cache-miss: true + + - name: Report cache restore status + if: ${{ steps.cuda-core-cache.outcome != 'skipped' }} + run: | + if [[ "${{ steps.cuda-core-cache.outputs.cache-hit }}" == true ]]; then + echo "cache is found" + else + echo "cache is not found" + exit 1 + fi + CACHE_DIR="${{ env.CUDA_CORE_ARTIFACTS_DIR }}" + CACHE_ARCHIVE="${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" + ls -l $CACHE_ARCHIVE + mkdir -p $CACHE_DIR + du -h $CACHE_ARCHIVE && + tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && + rm -f $CACHE_ARCHIVE || exit 1 + - name: Display structure of downloaded cuda.core build artifacts - shell: bash --noprofile --norc -xeuo pipefail {0} run: | pwd ls -lahR $CUDA_CORE_ARTIFACTS_DIR @@ -298,7 +452,6 @@ jobs: - name: Run cuda.bindings tests if: ${{ env.SKIP_CUDA_BINDINGS_TEST == '0' }} - shell: bash --noprofile --norc -xeuo pipefail {0} run: | ls $CUDA_PATH @@ -321,7 +474,6 @@ jobs: popd - name: Run cuda.core tests - shell: bash --noprofile --norc -xeuo pipefail {0} run: | if [[ ${{ matrix.python-version }} == "3.13" ]]; then # TODO: remove this hack once cuda-python has a cp313 build @@ -346,7 +498,7 @@ jobs: doc: name: Docs # The build stage could fail but we want the CI to keep moving. 
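+    # Unlike always(), !cancelled() still runs this job after upstream failures but skips it when the workflow run is cancelled.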
- if: ${{ github.repository_owner == 'nvidia' && always() }} + if: ${{ github.repository_owner == 'nvidia' && !cancelled() }} # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages permissions: id-token: write From af1017948b64e8e000695a5150a5177400e8b59e Mon Sep 17 00:00:00 2001 From: Leo Fang Date: Mon, 6 Jan 2025 16:55:44 +0000 Subject: [PATCH 2/5] also update doc build workflow --- .github/workflows/build-docs.yml | 69 ++++++++++++++++++++++++++++++-- 1 file changed, 66 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index 0f6ad311..c082df60 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -11,7 +11,7 @@ jobs: build: name: Build docs # The build stage could fail but we want the CI to keep moving. - if: ${{ github.repository_owner == 'nvidia' && always() }} + if: ${{ github.repository_owner == 'nvidia' && !cancelled() }} # WAR: Building the doc currently requires a GPU (NVIDIA/cuda-python#326,327) runs-on: linux-amd64-gpu-t4-latest-1-testing #runs-on: ubuntu-latest @@ -61,28 +61,91 @@ jobs: REPO_DIR=$(pwd) # make outputs from the previous job as env vars - echo "CUDA_CORE_ARTIFACT_NAME=cuda-core-python${PYTHON_VERSION_FORMATTED}-linux-64-${{ github.sha }}" >> $GITHUB_ENV + CUDA_CORE_ARTIFACT_BASENAME="cuda-core-python${PYTHON_VERSION_FORMATTED}-linux-64" + echo "CUDA_CORE_ARTIFACT_BASENAME=${CUDA_CORE_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_CORE_ARTIFACT_NAME=${CUDA_CORE_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_core/dist")" >> $GITHUB_ENV - echo "CUDA_BINDINGS_ARTIFACT_NAME=cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ inputs.build_ctk_ver }}-linux-64-${{ github.sha }}" >> $GITHUB_ENV + CUDA_BINDINGS_ARTIFACT_BASENAME="cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda${{ inputs.build_ctk_ver }}-linux-64" + echo "CUDA_BINDINGS_ARTIFACT_BASENAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}" >> $GITHUB_ENV + echo "CUDA_BINDINGS_ARTIFACT_NAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV + # We'll try GHA Artifacts first, and then fall back to GHA Cache - name: Download cuda.bindings build artifacts + id: cuda-bindings-download uses: actions/download-artifact@v4 + continue-on-error: true with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} path: ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }} + - name: Restore cuda.bindings cache + if: ${{ steps.cuda-bindings-download.outcome == 'failure' }} + id: cuda-bindings-cache + uses: actions/cache/restore@v4 + with: + key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} + path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz + restore-keys: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }} + fail-on-cache-miss: true + + - name: Report cache restore status + if: ${{ steps.cuda-bindings-cache.outcome != 'skipped' }} + run: | + if [[ "${{ steps.cuda-bindings-cache.outputs.cache-hit }}" == true ]]; then + echo "cache is found" + else + echo "cache is not found" + exit 1 + fi + CACHE_DIR="${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" + CACHE_ARCHIVE="${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" + ls -l $CACHE_ARCHIVE + mkdir -p $CACHE_DIR + du -h $CACHE_ARCHIVE && + tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && + rm -f $CACHE_ARCHIVE || exit 1 + - name: Display structure of downloaded cuda.bindings artifacts run: | pwd ls -lahR $CUDA_BINDINGS_ARTIFACTS_DIR - name: 
Download cuda.core build artifacts + id: cuda-core-download uses: actions/download-artifact@v4 + continue-on-error: true with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} path: ${{ env.CUDA_CORE_ARTIFACTS_DIR }} + - name: Restore cuda.core cache + if: ${{ steps.cuda-core-download.outcome == 'failure' }} + id: cuda-core-cache + uses: actions/cache/restore@v4 + with: + key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} + path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz + restore-keys: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }} + fail-on-cache-miss: true + + - name: Report cache restore status + if: ${{ steps.cuda-core-cache.outcome != 'skipped' }} + run: | + if [[ "${{ steps.cuda-core-cache.outputs.cache-hit }}" == true ]]; then + echo "cache is found" + else + echo "cache is not found" + exit 1 + fi + CACHE_DIR="${{ env.CUDA_CORE_ARTIFACTS_DIR }}" + CACHE_ARCHIVE="${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" + ls -l $CACHE_ARCHIVE + mkdir -p $CACHE_DIR + du -h $CACHE_ARCHIVE && + tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && + rm -f $CACHE_ARCHIVE || exit 1 + - name: Display structure of downloaded cuda.core build artifacts run: | pwd From f3a9991ad81aa3bceaa9dadbf843433f5278d72b Mon Sep 17 00:00:00 2001 From: Leo Fang Date: Mon, 6 Jan 2025 16:55:59 +0000 Subject: [PATCH 3/5] always draft a backport PR to make it easier --- .github/workflows/backport.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 28bb2b41..3f7d6df3 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -27,3 +27,4 @@ jobs: copy_requested_reviewers: true label_pattern: to-be-backported target_branches: 11.8.x + conflict_resolution: draft_commit_conflicts From 499d40c4c8e1350deff9fd289ff13bf5439b2d88 Mon Sep 17 00:00:00 2001 From: Leo Fang Date: Mon, 6 Jan 2025 20:08:38 +0000 Subject: [PATCH 4/5] ensure zstd is installed in all stages that could access GHA cache --- .github/actions/fetch_ctk/action.yml | 34 +++------------ .github/actions/install_unix_deps/action.yml | 45 ++++++++++++++++++++ .github/workflows/build-and-test.yml | 43 ++++++------------- .github/workflows/build-docs.yml | 8 ++++ 4 files changed, 71 insertions(+), 59 deletions(-) create mode 100644 .github/actions/install_unix_deps/action.yml diff --git a/.github/actions/fetch_ctk/action.yml b/.github/actions/fetch_ctk/action.yml index 7b8674ab..5850b4c7 100644 --- a/.github/actions/fetch_ctk/action.yml +++ b/.github/actions/fetch_ctk/action.yml @@ -20,35 +20,11 @@ runs: echo "CTK_CACHE_FILENAME=mini-ctk-${{ inputs.cuda-version }}-${{ inputs.host-platform }}.tar.gz" >> $GITHUB_ENV - name: Install dependencies - shell: bash --noprofile --norc -xeuo pipefail {0} - run: | - dependencies=(zstd curl xz-utils) - dependent_exes=(zstd curl xz) - - not_found=0 - for dep in ${dependent_exes[@]}; do - if ! (command -v curl 2>&1 >/dev/null); then - not_found=1 - break - fi - done - if [[ $not_found == 0 ]]; then - echo "All dependencies are found. Do nothing." - exit 0 - fi - if ! (command -v sudo 2>&1 >/dev/null); then - if [[ $EUID == 0 ]]; then - alias SUDO="" - else - echo "The following oprations require root access." 
- exit 1 - fi - else - alias SUDO="sudo" - fi - shopt -s expand_aliases - SUDO apt update - SUDO apt install -y ${dependencies[@]} + uses: ./.github/actions/install_unix_deps + continue-on-error: false + with: + dependencies: "zstd curl xz-utils" + dependent_exes: "zstd curl xz" - name: Download CTK cache id: ctk-get-cache diff --git a/.github/actions/install_unix_deps/action.yml b/.github/actions/install_unix_deps/action.yml new file mode 100644 index 00000000..f1ee73ab --- /dev/null +++ b/.github/actions/install_unix_deps/action.yml @@ -0,0 +1,45 @@ +name: Install dependencies on Ubuntu + +description: Install needed dependencies, regardless if using GitHub- or self- hosted runners, container, sudo or not. + +inputs: + dependencies: + required: true + type: string + dependent_exes: + required: true + type: string + +runs: + using: composite + steps: + - name: Install dependencies + shell: bash --noprofile --norc -xeuo pipefail {0} + run: | + dependencies=(${{ inputs.dependencies }}) + dependent_exes=(${{ inputs.dependent_exes }}) + + not_found=0 + for dep in ${dependent_exes[@]}; do + if ! (command -v $dep 2>&1 >/dev/null); then + not_found=1 + break + fi + done + if [[ $not_found == 0 ]]; then + echo "All dependencies are found. Do nothing." + exit 0 + fi + if ! (command -v sudo 2>&1 >/dev/null); then + if [[ $EUID == 0 ]]; then + alias SUDO="" + else + echo "The following oprations require root access." + exit 1 + fi + else + alias SUDO="sudo" + fi + shopt -s expand_aliases + SUDO apt update + SUDO apt install -y ${dependencies[@]} diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 343a88fc..61582557 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -100,35 +100,12 @@ jobs: - name: Install dependencies if: ${{ env.USE_CACHE == '1' }} - run: | + uses: ./.github/actions/install_unix_deps + continue-on-error: false + with: # For GHA Cache - dependencies=(zstd) - dependent_exes=(zstd) - - not_found=0 - for dep in ${dependent_exes[@]}; do - if ! (command -v curl 2>&1 >/dev/null); then - not_found=1 - break - fi - done - if [[ $not_found == 0 ]]; then - echo "All dependencies are found. Do nothing." - exit 0 - fi - if ! (command -v sudo 2>&1 >/dev/null); then - if [[ $EUID == 0 ]]; then - alias SUDO="" - else - echo "The following oprations require root access." 
- exit 1 - fi - else - alias SUDO="sudo" - fi - shopt -s expand_aliases - SUDO apt update - SUDO apt install -y ${dependencies[@]} + dependencies: "zstd" + dependent_exes: "zstd" - name: Dump environment run: | @@ -354,6 +331,14 @@ jobs: echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV echo "SKIP_CUDA_BINDINGS_TEST=${SKIP_CUDA_BINDINGS_TEST}" >> $GITHUB_ENV + - name: Install dependencies + uses: ./.github/actions/install_unix_deps + continue-on-error: false + with: + # zstd for GHA Cache, gcc for Cython tests + dependencies: "zstd build-essential" + dependent_exes: "zstd gcc" + # We'll try GHA Artifacts first, and then fall back to GHA Cache - name: Download cuda.bindings build artifacts id: cuda-bindings-download @@ -463,8 +448,6 @@ jobs: pip install -r requirements.txt pytest -rxXs tests/ if [[ "${{ matrix.host-platform }}" == linux* ]]; then - # cython tests require gcc - apt install -y build-essential bash tests/cython/build_tests.sh elif [[ "${{ matrix.host-platform }}" == win* ]]; then # TODO: enable this once win-64 runners are up diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index c082df60..51d5814b 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -31,6 +31,14 @@ jobs: # TODO: cache conda env to speed up the workflow once conda-incubator/setup-miniconda#267 # is resolved + - name: Install dependencies + uses: ./.github/actions/install_unix_deps + continue-on-error: false + with: + # zstd for GHA Cache + dependencies: "zstd" + dependent_exes: "zstd" + - name: Set up miniforge uses: conda-incubator/setup-miniconda@v3 with: From 75e37bd2282d581bf6b473ac3988b8dd3848ad0f Mon Sep 17 00:00:00 2001 From: Leo Fang Date: Wed, 8 Jan 2025 21:44:18 -0500 Subject: [PATCH 5/5] implement a custom download-artifact step to simplify the logic --- .github/BACKPORT_BRANCH | 1 + .github/ISSUE_TEMPLATE/release_checklist.yml | 1 + .github/workflows/backport.yml | 9 +- .github/workflows/build-and-test.yml | 170 ++++--------------- .github/workflows/build-docs.yml | 67 -------- 5 files changed, 43 insertions(+), 205 deletions(-) create mode 100644 .github/BACKPORT_BRANCH diff --git a/.github/BACKPORT_BRANCH b/.github/BACKPORT_BRANCH new file mode 100644 index 00000000..9266e678 --- /dev/null +++ b/.github/BACKPORT_BRANCH @@ -0,0 +1 @@ +11.8.x diff --git a/.github/ISSUE_TEMPLATE/release_checklist.yml b/.github/ISSUE_TEMPLATE/release_checklist.yml index 34b7bcee..ce168c58 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.yml +++ b/.github/ISSUE_TEMPLATE/release_checklist.yml @@ -12,6 +12,7 @@ body: label: Tasks for cuda-bindings / cuda-python release options: - label: "Push any internal updates for accommodating a new CTK release to the public (**IMPORTANT**: Need to wait for CTK posting!)" + - label: "If it is a major release, create a new branch to backport to and update the branch name [here](../BACKPORT_BRANCH)" - label: Follow the check list for `cuda-core` below for the remaining steps - type: checkboxes diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 3f7d6df3..47e2c531 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -19,6 +19,13 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + + - name: Load branch name + id: get-branch + run: | + OLD_BRANCH=$(cat .github/BACKPORT_BRANCH) + echo "OLD_BRANCH=${OLD_BRANCH}" >> $GITHUB_ENV + - name: Create backport pull requests uses: korthout/backport-action@v3 
with: @@ -26,5 +33,5 @@ jobs: copy_labels_pattern: true copy_requested_reviewers: true label_pattern: to-be-backported - target_branches: 11.8.x + target_branches: ${{ fromJSON(env.OLD_BRANCH) }} conflict_resolution: draft_commit_conflicts diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 61582557..98e4827c 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -1,3 +1,4 @@ +# Note: This name is referred to in the test job, so make sure any changes are sync'd up! name: "CI: Build and test" concurrency: @@ -87,26 +88,6 @@ jobs: echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV echo "CIBW_BUILD=${CIBW_BUILD}" >> $GITHUB_ENV - # When the CI is run due to merging to main, we want it to populate GHA Cache not Artifacts, - # so that CI workflows running on every branch have a fallback to use. - if [[ "${{ github.ref_name}}" == main ]]; then - echo "USE_CACHE=1" >> $GITHUB_ENV - else - echo "USE_CACHE=0" >> $GITHUB_ENV - fi - - # TODO: revert me before merging; this is to test the cache restore in the PR - echo "USE_CACHE=1" >> $GITHUB_ENV - - - name: Install dependencies - if: ${{ env.USE_CACHE == '1' }} - uses: ./.github/actions/install_unix_deps - continue-on-error: false - with: - # For GHA Cache - dependencies: "zstd" - dependent_exes: "zstd" - - name: Dump environment run: | env @@ -137,7 +118,6 @@ jobs: twine check ${{ env.CUDA_CORE_ARTIFACTS_DIR }}/*.whl - name: Upload cuda.core build artifacts - if: ${{ env.USE_CACHE == '0' }} uses: actions/upload-artifact@v4 with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} @@ -145,29 +125,6 @@ jobs: if-no-files-found: error overwrite: 'true' - - name: Prepare cuda.core cache - if: ${{ env.USE_CACHE == '1' }} - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - if [[ "${{ env.USE_CACHE }}" == 1 ]]; then - # this file is uploaded to GHA Cache - tar -c -f "${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" -C "${{ env.CUDA_CORE_ARTIFACTS_DIR }}" . - du -h "${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" - # check if the previous runs from the same PR have populated the cache, if so need to clean it up - CACHE_KEY=${{ env.CUDA_CORE_ARTIFACT_NAME }} - if [ $(gh cache list | grep $CACHE_KEY | wc -l) == "1" ]; then - gh cache delete $CACHE_KEY - fi - fi - - - name: Cache cuda.core build artifacts - if: ${{ env.USE_CACHE == '1' }} - uses: actions/cache/save@v4 - with: - key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} - path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz - - name: Set up mini CTK uses: ./.github/actions/fetch_ctk continue-on-error: false @@ -207,24 +164,7 @@ jobs: # run: | # twine check ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}/*.whl - - name: Prepare cuda.bindings cache - if: ${{ env.USE_CACHE == '1' }} - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - if [[ "${{ env.USE_CACHE }}" == 1 ]]; then - # this file is uploaded to GHA Cache - tar -c -f "${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" -C "${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" . 
- du -h "${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" - # check if the previous runs from the same PR have populated the cache, if so need to clean it up - CACHE_KEY=${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} - if [ $(gh cache list | grep $CACHE_KEY | wc -l) == "1" ]; then - gh cache delete $CACHE_KEY - fi - fi - - name: Upload cuda.bindings build artifacts - if: ${{ env.USE_CACHE == '0' }} uses: actions/upload-artifact@v4 with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} @@ -232,13 +172,6 @@ jobs: if-no-files-found: error overwrite: 'true' - - name: Cache cuda.bindings build artifacts - if: ${{ env.USE_CACHE == '1' }} - uses: actions/cache/save@v4 - with: - key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} - path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz - - name: Pass environment variables to the next runner id: pass_env run: | @@ -322,6 +255,7 @@ jobs: # make outputs from the previous job as env vars CUDA_CORE_ARTIFACT_BASENAME="cuda-core-python${PYTHON_VERSION_FORMATTED}-${{ matrix.host-platform }}" + echo "PYTHON_VERSION_FORMATTED=${PYTHON_VERSION_FORMATTED}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACT_BASENAME=${CUDA_CORE_ARTIFACT_BASENAME}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACT_NAME=${CUDA_CORE_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_CORE_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_core/dist")" >> $GITHUB_ENV @@ -335,45 +269,39 @@ jobs: uses: ./.github/actions/install_unix_deps continue-on-error: false with: - # zstd for GHA Cache, gcc for Cython tests - dependencies: "zstd build-essential" - dependent_exes: "zstd gcc" + # gcc for Cython tests, jq/wget for artifact fetching + dependencies: "build-essential jq wget" + dependent_exes: "gcc jq wget" - # We'll try GHA Artifacts first, and then fall back to GHA Cache - name: Download cuda.bindings build artifacts - id: cuda-bindings-download + if: ${{ env.SKIP_CUDA_BINDINGS_TEST == '0'}} uses: actions/download-artifact@v4 - continue-on-error: true with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} path: ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }} - - name: Restore cuda.bindings cache - if: ${{ steps.cuda-bindings-download.outcome == 'failure' }} - id: cuda-bindings-cache - uses: actions/cache/restore@v4 - with: - key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} - path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz - restore-keys: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }} - fail-on-cache-miss: true - - - name: Report cache restore status - if: ${{ steps.cuda-bindings-cache.outcome != 'skipped' }} + - name: Download cuda.bindings build artifacts from the prior branch + if: ${{ env.SKIP_CUDA_BINDINGS_TEST == '1'}} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - if [[ "${{ steps.cuda-bindings-cache.outputs.cache-hit }}" == true ]]; then - echo "cache is found" - else - echo "cache is not found" - exit 1 - fi - CACHE_DIR="${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" - CACHE_ARCHIVE="${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" - ls -l $CACHE_ARCHIVE - mkdir -p $CACHE_DIR - du -h $CACHE_ARCHIVE && - tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && - rm -f $CACHE_ARCHIVE || exit 1 + # See https://github.com/cli/cli/blob/trunk/docs/install_linux.md#debian-ubuntu-linux-raspberry-pi-os-apt. + # gh is needed for artifact fetching. 
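+          # Flow: install gh, locate the latest completed "CI: Build and test" run on the backport branch, then pull its cuda-bindings wheels from there.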
+ mkdir -p -m 755 /etc/apt/keyrings \ + && out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \ + && cat $out | tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \ + && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt update \ + && apt install gh -y + + OLD_BRANCH=$(cat .github/BACKPORT_BRANCH) + OLD_BASENAME="cuda-bindings-python${PYTHON_VERSION_FORMATTED}-cuda*-${{ matrix.host-platform }}*" + LATEST_PRIOR_RUN_ID=$(gh run list -b ${OLD_BRANCH} -L 1 -w "CI: Build and test" -s completed -R NVIDIA/cuda-python --json databaseId | jq '.[]| .databaseId') + gh run download $LATEST_PRIOR_RUN_ID -p ${OLD_BASENAME} -R NVIDIA/cuda-python + ls -al $OLD_BASENAME + mkdir -p "${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" + mv $OLD_BASENAME/*.whl "${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}"/ - name: Display structure of downloaded cuda.bindings artifacts run: | @@ -381,40 +309,11 @@ jobs: ls -lahR $CUDA_BINDINGS_ARTIFACTS_DIR - name: Download cuda.core build artifacts - id: cuda-core-download uses: actions/download-artifact@v4 - continue-on-error: true with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} path: ${{ env.CUDA_CORE_ARTIFACTS_DIR }} - - name: Restore cuda.core cache - if: ${{ steps.cuda-core-download.outcome == 'failure' }} - id: cuda-core-cache - uses: actions/cache/restore@v4 - with: - key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} - path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz - restore-keys: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }} - fail-on-cache-miss: true - - - name: Report cache restore status - if: ${{ steps.cuda-core-cache.outcome != 'skipped' }} - run: | - if [[ "${{ steps.cuda-core-cache.outputs.cache-hit }}" == true ]]; then - echo "cache is found" - else - echo "cache is not found" - exit 1 - fi - CACHE_DIR="${{ env.CUDA_CORE_ARTIFACTS_DIR }}" - CACHE_ARCHIVE="${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" - ls -l $CACHE_ARCHIVE - mkdir -p $CACHE_DIR - du -h $CACHE_ARCHIVE && - tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && - rm -f $CACHE_ARCHIVE || exit 1 - - name: Display structure of downloaded cuda.core build artifacts run: | pwd @@ -458,16 +357,13 @@ jobs: - name: Run cuda.core tests run: | - if [[ ${{ matrix.python-version }} == "3.13" ]]; then - # TODO: remove this hack once cuda-python has a cp313 build - if [[ $SKIP_CUDA_BINDINGS_TEST == 1 ]]; then - echo "Python 3.13 + cuda-python ${{ matrix.cuda-version }} is not supported, skipping the test..." - exit 0 - fi - fi - # If build/test majors match: cuda.bindings is installed in the previous step. - # If mismatch: cuda.bindings is installed from PyPI. + # If mismatch: cuda.bindings is installed from the backport branch. + if [[ "${SKIP_CUDA_BINDINGS_TEST}" == 1 ]]; then + pushd "${CUDA_BINDINGS_ARTIFACTS_DIR}" + pip install *.whl + popd + fi TEST_CUDA_MAJOR="$(cut -d '.' 
-f 1 <<< ${{ matrix.cuda-version }})" pushd "${CUDA_CORE_ARTIFACTS_DIR}" pip install $(ls *.whl)["cu${TEST_CUDA_MAJOR}"] diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index 51d5814b..cafb1fc9 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -31,14 +31,6 @@ jobs: # TODO: cache conda env to speed up the workflow once conda-incubator/setup-miniconda#267 # is resolved - - name: Install dependencies - uses: ./.github/actions/install_unix_deps - continue-on-error: false - with: - # zstd for GHA Cache - dependencies: "zstd" - dependent_exes: "zstd" - - name: Set up miniforge uses: conda-incubator/setup-miniconda@v3 with: @@ -78,82 +70,23 @@ jobs: echo "CUDA_BINDINGS_ARTIFACT_NAME=${CUDA_BINDINGS_ARTIFACT_BASENAME}-${{ github.sha }}" >> $GITHUB_ENV echo "CUDA_BINDINGS_ARTIFACTS_DIR=$(realpath "$REPO_DIR/cuda_bindings/dist")" >> $GITHUB_ENV - # We'll try GHA Artifacts first, and then fall back to GHA Cache - name: Download cuda.bindings build artifacts - id: cuda-bindings-download uses: actions/download-artifact@v4 - continue-on-error: true with: name: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} path: ${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }} - - name: Restore cuda.bindings cache - if: ${{ steps.cuda-bindings-download.outcome == 'failure' }} - id: cuda-bindings-cache - uses: actions/cache/restore@v4 - with: - key: ${{ env.CUDA_BINDINGS_ARTIFACT_NAME }} - path: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz - restore-keys: ${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }} - fail-on-cache-miss: true - - - name: Report cache restore status - if: ${{ steps.cuda-bindings-cache.outcome != 'skipped' }} - run: | - if [[ "${{ steps.cuda-bindings-cache.outputs.cache-hit }}" == true ]]; then - echo "cache is found" - else - echo "cache is not found" - exit 1 - fi - CACHE_DIR="${{ env.CUDA_BINDINGS_ARTIFACTS_DIR }}" - CACHE_ARCHIVE="${{ env.CUDA_BINDINGS_ARTIFACT_BASENAME }}.tar.gz" - ls -l $CACHE_ARCHIVE - mkdir -p $CACHE_DIR - du -h $CACHE_ARCHIVE && - tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && - rm -f $CACHE_ARCHIVE || exit 1 - - name: Display structure of downloaded cuda.bindings artifacts run: | pwd ls -lahR $CUDA_BINDINGS_ARTIFACTS_DIR - name: Download cuda.core build artifacts - id: cuda-core-download uses: actions/download-artifact@v4 - continue-on-error: true with: name: ${{ env.CUDA_CORE_ARTIFACT_NAME }} path: ${{ env.CUDA_CORE_ARTIFACTS_DIR }} - - name: Restore cuda.core cache - if: ${{ steps.cuda-core-download.outcome == 'failure' }} - id: cuda-core-cache - uses: actions/cache/restore@v4 - with: - key: ${{ env.CUDA_CORE_ARTIFACT_NAME }} - path: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz - restore-keys: ${{ env.CUDA_CORE_ARTIFACT_BASENAME }} - fail-on-cache-miss: true - - - name: Report cache restore status - if: ${{ steps.cuda-core-cache.outcome != 'skipped' }} - run: | - if [[ "${{ steps.cuda-core-cache.outputs.cache-hit }}" == true ]]; then - echo "cache is found" - else - echo "cache is not found" - exit 1 - fi - CACHE_DIR="${{ env.CUDA_CORE_ARTIFACTS_DIR }}" - CACHE_ARCHIVE="${{ env.CUDA_CORE_ARTIFACT_BASENAME }}.tar.gz" - ls -l $CACHE_ARCHIVE - mkdir -p $CACHE_DIR - du -h $CACHE_ARCHIVE && - tar -x -f $CACHE_ARCHIVE -C $CACHE_DIR && - rm -f $CACHE_ARCHIVE || exit 1 - - name: Display structure of downloaded cuda.core build artifacts run: | pwd