# Cache GMT remote data files and upload as artifacts
#
# This workflow downloads data files needed by PyGMT tests/documentation from
# the GMT data server and uploads them as workflow artifacts that can be
# accessed by other GitHub Actions workflows.
#
# It is scheduled to run every Sunday at 12:00 (UTC). If new remote files are
# needed urgently, maintainers can update the workflow file or the
# 'pygmt/helpers/caching.py' file to refresh the cache.
#
name: Cache data
on:
  pull_request:
    # Make any changes to the following files to refresh the cache in PRs
    paths:
      - 'pygmt/helpers/caching.py'
      - '.github/workflows/cache_data.yaml'
  # Schedule runs at 12 noon (UTC) every Sunday
  schedule:
    - cron: '0 12 * * 0'
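  # (Cron fields are minute, hour, day-of-month, month, day-of-week, so
  # '0 12 * * 0' fires at 12:00 UTC every Sunday.)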
jobs:
  gmt_cache:
    name: Cache GMT artifacts
    runs-on: macos-latest
    defaults:
      run:
        shell: bash -l {0}
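        # (A login shell is used so that the micromamba environment created
        # below is activated for every 'run' step.)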
    steps:
      # Checkout current git repository
      - name: Checkout
        uses: actions/[email protected]
        with:
          # fetch all history so that setuptools-scm works
          fetch-depth: 0
      # Install Micromamba with conda-forge dependencies
      - name: Setup Micromamba
        uses: mamba-org/[email protected]
        with:
          environment-name: pygmt
          condarc: |
            channels:
              - conda-forge
              - nodefaults
          create-args: >-
            python=3.12
            gmt=6.5.0
            numpy
            pandas
            xarray
            netCDF4
            packaging
            build
      # Install the package that we want to test
      - name: Install the package
        run: |
          python -m build --sdist
          python -m pip install dist/*
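      # (Building the sdist first means the packaged distribution, not the
      # raw git checkout, is what gets installed.)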
      # Download remote files
      - name: Download remote data
        run: |
          python -c "from pygmt.helpers.caching import cache_data; cache_data()"
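      # (cache_data() fetches the remote files listed in
      # pygmt/helpers/caching.py into the local GMT user directory ~/.gmt.)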
      - name: List downloaded remote files
        run: ls -lhR ~/.gmt
      # Upload the downloaded files as artifacts to GitHub
      - name: Upload artifacts to GitHub
        uses: actions/upload-artifact@v4
        with:
          name: gmt-cache
          path: |
            ~/.gmt/cache
            ~/.gmt/server
            ~/.gmt/gmt_data_server.txt
            ~/.gmt/gmt_hash_server.txt
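
# For reference, another workflow can restore the 'gmt-cache' artifact with a
# step along these lines. This is a minimal sketch: by default the built-in
# actions/download-artifact only sees artifacts from the same workflow run, so
# a third-party action such as dawidd6/action-download-artifact is shown here
# (that action's name, version, and inputs are an assumption, not part of this
# workflow):
#
#   - name: Download cached GMT remote data files
#     uses: dawidd6/action-download-artifact@v6
#     with:
#       workflow: cache_data.yaml  # this workflow
#       name: gmt-cache            # artifact name uploaded above
#       path: ~/.gmt               # restore into the GMT user directory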