From ad6af6e55841d6ab60be42514076406cf35850b5 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:28:37 -0800 Subject: [PATCH 01/89] Delete .github/workflows/run_flake8.yml --- .github/workflows/run_flake8.yml | 31 ------------------------------- 1 file changed, 31 deletions(-) delete mode 100644 .github/workflows/run_flake8.yml diff --git a/.github/workflows/run_flake8.yml b/.github/workflows/run_flake8.yml deleted file mode 100644 index a57042c66..000000000 --- a/.github/workflows/run_flake8.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: Run style check -on: pull_request - -jobs: - run-flake8: - runs-on: ubuntu-latest - steps: - - name: Cancel non-latest runs - uses: styfle/cancel-workflow-action@0.11.0 - with: - all_but_latest: true - access_token: ${{ github.token }} - - - uses: actions/checkout@v3 - with: - submodules: 'recursive' - fetch-depth: 0 # tags are required for versioneer to determine the version - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Install flake8 - run: | - python -m pip install --upgrade pip - python -m pip install flake8 - python -m pip list - - - name: Run flake8 - run: flake8 From c03c1f9f1dba7a271044c657bcccc38c53c2edab Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:29:26 -0800 Subject: [PATCH 02/89] Create ruff.yml --- .github/workflows/ruff.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/workflows/ruff.yml diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 000000000..1933fa75e --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,11 @@ +name: Ruff +on: pull_request + +jobs: + ruff: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Run ruff + uses: chartboost/ruff-action@v1 From bd381ccddd1309092e974102c151f9642f542e77 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:29:46 -0800 Subject: [PATCH 03/89] Update check_sphinx_links.yml --- .github/workflows/check_sphinx_links.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/check_sphinx_links.yml b/.github/workflows/check_sphinx_links.yml index e1eddb97a..573f2e15a 100644 --- a/.github/workflows/check_sphinx_links.yml +++ b/.github/workflows/check_sphinx_links.yml @@ -15,13 +15,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' From 92c200b5233d4f42337c32ba20914cf09fbd414e Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:30:03 -0800 Subject: [PATCH 04/89] Update codespell.yml --- .github/workflows/codespell.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 7aa79c9e7..b7bef25db 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -14,6 +14,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 + - name: Codespell uses: codespell-project/actions-codespell@v2 From f8980da36a3c2e0311b1537284209c1b56f2ab1f Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:30:17 -0800 Subject: [PATCH 05/89] Update deploy_release.yml --- 
.github/workflows/deploy_release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy_release.yml b/.github/workflows/deploy_release.yml index f9abba102..63475d1ce 100644 --- a/.github/workflows/deploy_release.yml +++ b/.github/workflows/deploy_release.yml @@ -10,13 +10,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo with submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' From 586a8081ae49fac80519f7afee39b679f84b30ba Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:30:31 -0800 Subject: [PATCH 06/89] Update generate_test_files.yml --- .github/workflows/generate_test_files.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/generate_test_files.yml b/.github/workflows/generate_test_files.yml index 48e33a0b3..27848909b 100644 --- a/.github/workflows/generate_test_files.yml +++ b/.github/workflows/generate_test_files.yml @@ -17,13 +17,13 @@ jobs: - { name: pynwb-1.5.1, pynwb-version: "1.5.1", python-version: "3.8"} - { name: pynwb-2.1.0, pynwb-version: "2.1.0", python-version: "3.9"} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} From 688eb77ab0b5b1de6ba66be8026376903fb0ed79 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:30:59 -0800 Subject: [PATCH 07/89] Update run_all_tests.yml --- .github/workflows/run_all_tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index c47941c21..9cc989bad 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -50,13 +50,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-ver }} From 2204be810902408d23b933bf55ea46870f951e97 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:32:18 -0800 Subject: [PATCH 08/89] Update run_all_tests.yml --- .github/workflows/run_all_tests.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index 9cc989bad..b795e0cf7 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -105,13 +105,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-ver }} @@ -149,13 +149,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: 
submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true python-version: ${{ matrix.python-ver }} @@ -206,13 +206,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true activate-environment: ros3 @@ -253,13 +253,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true activate-environment: ros3 From 7ccde59b259adbd39715f4ae96657f1eb5db9593 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:32:58 -0800 Subject: [PATCH 09/89] Update run_coverage.yml --- .github/workflows/run_coverage.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml index 18dc00903..35094d4e0 100644 --- a/.github/workflows/run_coverage.yml +++ b/.github/workflows/run_coverage.yml @@ -32,13 +32,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON }} @@ -63,7 +63,7 @@ jobs: python -m coverage report -m - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: unit files: coverage.xml From 0357321230258ed9e91250c55ac3a9473eb5fea2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:33:14 -0800 Subject: [PATCH 10/89] Update run_dandi_read_tests.yml --- .github/workflows/run_dandi_read_tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/run_dandi_read_tests.yml b/.github/workflows/run_dandi_read_tests.yml index 7148d209e..0d9e165d8 100644 --- a/.github/workflows/run_dandi_read_tests.yml +++ b/.github/workflows/run_dandi_read_tests.yml @@ -17,13 +17,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' From 176d08dd6d8fe50c55368ea8f368cdab2c982ef1 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:33:38 -0800 Subject: [PATCH 11/89] Update run_inspector_tests.yml --- .github/workflows/run_inspector_tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/run_inspector_tests.yml b/.github/workflows/run_inspector_tests.yml index da7efff5f..9f57f9798 100644 --- a/.github/workflows/run_inspector_tests.yml +++ b/.github/workflows/run_inspector_tests.yml @@ -15,13 
+15,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' From e526c541970e3aab017a1d367c11850a94727848 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 22 Feb 2024 07:34:53 -0800 Subject: [PATCH 12/89] Update run_tests.yml --- .github/workflows/run_tests.yml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index e4479a554..c61447998 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -32,13 +32,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-ver }} @@ -89,13 +89,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-ver }} @@ -128,13 +128,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true python-version: ${{ matrix.python-ver }} @@ -182,13 +182,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true activate-environment: ros3 @@ -227,13 +227,13 @@ jobs: all_but_latest: true access_token: ${{ github.token }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Conda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: auto-update-conda: true activate-environment: ros3 @@ -271,13 +271,13 @@ jobs: access_token: ${{ github.token }} - name: Checkout repo with submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: 'recursive' fetch-depth: 0 # tags are required for versioneer to determine the version - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' From de5e45bed15ef154bd2b700d7d01e6a4e5654344 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 09:35:56 -0800 Subject: [PATCH 13/89] Update ruff.yml --- .github/workflows/ruff.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ruff.yml 
b/.github/workflows/ruff.yml index 1933fa75e..170ecf921 100644 --- a/.github/workflows/ruff.yml +++ b/.github/workflows/ruff.yml @@ -1,11 +1,11 @@ -name: Ruff -on: pull_request +# name: Ruff +# on: pull_request -jobs: - ruff: - runs-on: ubuntu-latest - steps: - - name: Checkout repo - uses: actions/checkout@v4 - - name: Run ruff - uses: chartboost/ruff-action@v1 +# jobs: +# ruff: +# runs-on: ubuntu-latest +# steps: +# - name: Checkout repo +# uses: actions/checkout@v4 +# - name: Run ruff +# uses: chartboost/ruff-action@v1 From 320403a5ad36f184e379b99f97e9c4d0fffef4b6 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:11:03 -0800 Subject: [PATCH 14/89] Create pyproject.toml --- pyproject.toml | 104 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..0b82e3503 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,104 @@ +[build-system] +requires = ["hatchling", "hatch-vcs"] # hatchling for build | hatch-vcs for versioning +build-backend = "hatchling.build" # the build backend used + +[project] +name = "pynwb" +authors = [ + { name="Andrew Tritt", email="ajtritt@lbl.gov" }, + { name="Ryan Ly", email="rly@lbl.gov" }, + { name="Oliver Ruebel", email="oruebel@lbl.gov" }, + { name="Ben Dichter", email="ben.dichter@gmail.com" }, + { name="Matthew Avaylon", email="mavaylon@lbl.gov" } +] +description= "Package for working with Neurodata stored in the NWB format." +readme = "README.rst" +requires-python = ">=3.8" +license = {text = "BSD-3-Clause"} +classifiers = [ + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: BSD License", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Operating System :: Microsoft :: Windows", + "Operating System :: MacOS", + "Operating System :: Unix", + "Topic :: Scientific/Engineering :: Medical Science Apps." +] +dependencies = [ + "h5py>=2.10", + "hdmf>=3.12.2", + "numpy>=1.18", # match the version used in hdmf + "pandas>=1.1.5", + "python-dateutil>=2.7.3", +] +dynamic = ["version"] # the build backend will compute the version dynamically from git tag (or a __version__) + +[project.optional-dependencies] +# Add optional dependencies here + +[project.urls] +"Homepage" = "https://github.com/NeurodataWithoutBorders/pynwb" +"Bug Tracker" = "https://github.com/NeurodataWithoutBorders/pynwb/issues" + +[tool.hatch.version] +source = "vcs" + +[tool.hatch.build.hooks.vcs] +# this file is created/updated when the package is installed and used in +# src/pynwb/__init__.py to set `__version__` (from _version.py). +version-file = "src/pynwb/_version.py" + +[tool.hatch.build.targets.wheel] +packages = ["src/pynwb"] + +[tool.pytest.ini_options] +# Addopts creates a shortcut for pytest. For example below, running `pytest` will actually run `pytest --cov --cov-report html`. +addopts = "--cov --cov-report html" # generates coverage report in html format without showing anything on the terminal. 
+ +[tool.codespell] +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema" +ignore-words-list = "datas" + +[tool.coverage.run] +branch = true +source = ["src/"] +omit = [ + "src/pynwb/_due.py", + "src/pynwb/testing/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "@abstract" +] + +[tool.ruff] +select = ["E", "F", "T100", "T201", "T203"] +exclude = [ + ".git", + ".tox", + "__pycache__", + "build/", + "dist/", + "src/nwb-schema", + "docs/source/conf.py", + "src/pynwb/_due.py", + "docs/source/tutorials/", +] +line-length = 120 + +[tool.ruff.per-file-ignores] +"docs/gallery/*" = ["E402", "T201"] +"src/*/__init__.py" = ["F401"] +"test_gallery.py" = ["T201"] + +[tool.ruff.mccabe] +max-complexity = 17 + From 88e2a9b13f3cb1e1d067cd2913f770e7b03c864b Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:15:08 -0800 Subject: [PATCH 15/89] Update install_developers.rst --- docs/source/install_developers.rst | 29 +++++------------------------ 1 file changed, 5 insertions(+), 24 deletions(-) diff --git a/docs/source/install_developers.rst b/docs/source/install_developers.rst index b2c2d18e7..15816397a 100644 --- a/docs/source/install_developers.rst +++ b/docs/source/install_developers.rst @@ -83,39 +83,20 @@ editable mode. $ git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git $ cd pynwb - $ pip install -r requirements.txt + $ pip install -r requirements.txt -r requirements-dev.txt $ pip install -e . Run tests --------- -For running the tests, it is required to install the development requirements. Again, first activate your -virtualenv or conda environment. - -.. code:: - - $ git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git - $ cd pynwb - $ pip install -r requirements.txt -r requirements-dev.txt - $ pip install -e . - $ tox - -For debugging it can be useful to keep the intermediate NWB files created by -the tests. To keep these files create the environment variables -``CLEAN_NWB``/``CLEAN_HDMF`` and set them to ``1``. +You can run the full test suite by running: +.. code:: bash -Following PyNWB Style Guide ---------------------------- - -Before you create a Pull Request, make sure you are following PyNWB style guide -(`PEP8 `_). To do that simply run -the following command in the project's root directory. - -.. code:: + pytest - $ flake8 +This will run all the tests and compute the test coverage. The coverage report can be found in ``/htmlcov``. FAQ From 82cfb2aea2c3cdbe191ae0e025c51e09334ce742 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:18:06 -0800 Subject: [PATCH 16/89] Update release.md --- .github/PULL_REQUEST_TEMPLATE/release.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE/release.md b/.github/PULL_REQUEST_TEMPLATE/release.md index 8a15d5ee2..9fe11241f 100644 --- a/.github/PULL_REQUEST_TEMPLATE/release.md +++ b/.github/PULL_REQUEST_TEMPLATE/release.md @@ -2,15 +2,15 @@ Prepare for release of PyNWB [version] ### Before merging: - [ ] Major and minor releases: Update package versions in `requirements.txt`, `requirements-dev.txt`, - `requirements-doc.txt`, `requirements-min.txt`, `environment-ros3.yml`, and `setup.py` as needed. + `requirements-doc.txt`, `requirements-min.txt`, and `environment-ros3.yml` as needed. 
- [ ] Check legal file dates and information in `Legal.txt`, `license.txt`, `README.rst`, `docs/source/conf.py`, and any other locations as needed -- [ ] Update `setup.py` as needed +- [ ] Update `pyproject.toml` as needed - [ ] Update `README.rst` as needed - [ ] Update `src/pynwb/nwb-schema` submodule as needed. Check the version number and commit SHA manually - [ ] Update changelog (set release date) in `CHANGELOG.md` and any other docs as needed - [ ] Run tests locally including gallery, validation, and streaming tests, and inspect all warnings and outputs - (`python test.py -v -p -i -b -w -r > out.txt 2>&1`) + (`python test.py`) - [ ] Test docs locally (`make clean`, `make html`) - [ ] Push changes to this PR and make sure all PRs to be included in this release have been merged - [ ] Check that the readthedocs build for this PR succeeds (build latest to pull the new branch, then activate and From b6709ec4023f2e55c406c35d868683aebb289929 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:19:50 -0800 Subject: [PATCH 17/89] Update MANIFEST.in --- MANIFEST.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 04c3ad5c8..18d92c68e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,5 @@ -include license.txt Legal.txt versioneer.py src/pynwb/_version.py src/pynwb/_due.py +include license.txt Legal.txt src/pynwb/_due.py include requirements.txt requirements-dev.txt requirements-doc.txt requirements-min.txt environment-ros3.yml -include test.py tox.ini +include tox.ini graft tests From 098d5eb95ddebbf106027600ca29b93161eeed0a Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:20:30 -0800 Subject: [PATCH 18/89] Delete Makefile --- Makefile | 83 -------------------------------------------------------- 1 file changed, 83 deletions(-) delete mode 100644 Makefile diff --git a/Makefile b/Makefile deleted file mode 100644 index 4005c8915..000000000 --- a/Makefile +++ /dev/null @@ -1,83 +0,0 @@ -PYTHON = python -FLAKE = flake8 -COVERAGE = coverage - -help: - @echo "Please use \`make ' where is one of" - @echo " init to install required packages" - @echo " build to build the python package(s)" - @echo " install to build and install the python package(s)" - @echo " develop to build and install the python package(s) for development" - @echo " test to run all integration and unit tests" - @echo " htmldoc to make the HTML documentation and open it with the default browser" - @echo " coverage to run tests, build coverage HTML report and open it with the default browser" - @echo "" - @echo "Advanced targets" - @echo " apidoc to generate API docs *.rst files from sources" - @echo " coverage-only to run tests and build coverage report" - @echo " coverage-open to open coverage HTML report in the default browser" - @echo " htmlclean to remove all generated documentation" - @echo " htmldoc-only to make the HTML documentation" - @echo " htmldoc-open to open the HTML documentation with the default browser" - @echo " pdfdoc to make the LaTeX sources and build the PDF of the documentation" - -init: - pip install -r requirements.txt -r requirements-dev.txt -r requirements-doc.txt - -build: - $(PYTHON) setup.py build - -install: build - $(PYTHON) setup.py install - -develop: build - $(PYTHON) setup.py develop - -test: - pip install -r requirements-dev.txt - tox - -flake: - $(FLAKE) --exclude=nwb-schema src/ - $(FLAKE) tests/ - $(FLAKE) --ignore E402,W504 docs/gallery - -checkpdb: - find {src,tests} -name "*.py" -exec grep 
-Hn -e pdb -e breakpoint -e print {} \; - -devtest: - $(PYTHON) test.py - -testclean: - rm *.npy *.nwb *.yaml - -apidoc: - pip install -r requirements-doc.txt - cd docs && $(MAKE) apidoc - -htmldoc-only: apidoc - cd docs && $(MAKE) html - -htmlclean: - cd docs && $(MAKE) clean - -htmldoc-open: - @echo "" - @echo "To view the HTML documentation open: docs/_build/html/index.html" - open docs/_build/html/index.html || xdg-open docs/_build/html/index.html - -htmldoc: htmldoc-only htmldoc-open - -pdfdoc: - cd docs && $(MAKE) latexpdf - @echo "" - @echo "To view the PDF documentation open: docs/_build/latex/PyNWB.pdf" - -coverage-only: - tox -e localcoverage - -coverage-open: - @echo "To view coverage data open: ./tests/coverage/htmlcov/index.html" - open ./tests/coverage/htmlcov/index.html || xdg-open ./tests/coverage/htmlcov/index.html - -coverage: coverage-only coverage-open From 4eb34dbf34387031d2d50b496deb172584dd678d Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:25:13 -0800 Subject: [PATCH 19/89] Update README.rst --- README.rst | 30 ++++++++---------------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/README.rst b/README.rst index 14eb32fce..d554207cb 100644 --- a/README.rst +++ b/README.rst @@ -13,35 +13,21 @@ Latest Release .. image:: https://anaconda.org/conda-forge/pynwb/badges/version.svg :target: https://anaconda.org/conda-forge/pynwb -Code Coverage -============== - -.. image:: https://github.com/NeurodataWithoutBorders/pynwb/workflows/Run%20coverage/badge.svg - :target: https://github.com/NeurodataWithoutBorders/pynwb/actions?query=workflow%3A%22Run+coverage%22 - -Overall test coverage - -.. image:: https://codecov.io/gh/NeurodataWithoutBorders/pynwb/branch/dev/graph/badge.svg - :target: https://codecov.io/gh/NeurodataWithoutBorders/pynwb - -Unit test coverage - -.. image:: https://codecov.io/gh/NeurodataWithoutBorders/pynwb/branch/dev/graph/badge.svg?flag=unit - :target: https://codecov.io/gh/NeurodataWithoutBorders/pynwb - -Integration test coverage - -.. image:: https://codecov.io/gh/NeurodataWithoutBorders/pynwb/branch/dev/graph/badge.svg?flag=integration - :target: https://codecov.io/gh/NeurodataWithoutBorders/pynwb Overall Health ============== +.. image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_coverage.yml/badge.svg + :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_coverage.yml + .. image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_tests.yml/badge.svg :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_tests.yml -.. image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_flake8.yml/badge.svg - :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/run_flake8.yml +.. image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/codespell.yml/badge.svg + :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/codespell.yml + +.. image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/ruff.yml/badge.svg + :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/ruff.yml .. 
image:: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/check_external_links.yml/badge.svg :target: https://github.com/NeurodataWithoutBorders/pynwb/actions/workflows/check_external_links.yml From 1361b7b2effa26dae239f7eeecccdcf562baccb2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:27:17 -0800 Subject: [PATCH 20/89] Update CONTRIBUTING.rst --- docs/CONTRIBUTING.rst | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index f774f49bf..278a06ffc 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -115,7 +115,7 @@ Projects are currently used mainly on the NeurodataWithoutBorders organization l .. _sec-styleguides: -Styleguides +Style Guides ----------- Git Commit Message Styleguide @@ -145,7 +145,23 @@ Format Specification Styleguide Python Code Styleguide ^^^^^^^^^^^^^^^^^^^^^^ -Python coding style is checked via ``flake8`` for automatic checking of PEP8 style during pull requests. +Before you create a Pull Request, make sure you are following the PyNWB style guide. +To check whether your code conforms to the PyNWB style guide, simply run the ruff_ tool in the project's root +directory. ``ruff`` will also sort imports automatically and check against additional code style rules. + +We also use ``ruff`` to sort python imports automatically and double-check that the codebase +conforms to PEP8 standards, while using the codespell_ tool to check spelling. + +``ruff`` and ``codespell`` are installed when you follow the developer installation instructions. See +:ref:`install_developers`. + +.. _ruff: https://beta.ruff.rs/docs/ +.. _codespell: https://github.com/codespell-project/codespell + +.. code:: + + $ ruff check . + $ codespell Endorsement ----------- From d9e3d91ecd5d09bc7be951dda6190509a5c12747 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:29:42 -0800 Subject: [PATCH 21/89] Update conf.py --- docs/source/conf.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 6b101c23c..6ceee9d50 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,8 +33,7 @@ # version is used. sys.path.insert(0, os.path.join(project_root, 'src')) -from pynwb._version import get_versions - +import pynwb # -- Autodoc configuration ----------------------------------------------------- @@ -191,9 +190,9 @@ def __call__(self, filename): # built documents. # # The short X.Y version. -version = '{}'.format(get_versions()['version']) +version = pynwb.__version__ # The full version, including alpha/beta/rc tags. -release = '{}'.format(get_versions()['version']) +release = pynwb.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From 735a71ce43f41e0fda8bc2bce91e16d4ab5dede2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:37:35 -0800 Subject: [PATCH 22/89] Update requirements-dev.txt --- requirements-dev.txt | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index a19b50bd3..c9f442640 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,13 +2,11 @@ # compute coverage, and create test environments. note that depending on the version of python installed, different
# -black==23.3.0 -codespell==2.2.4 -coverage==7.2.2 -flake8==6.0.0 -flake8-debugger==4.1.2 -flake8-print==5.0.0 +black==23.10.1 +codespell==2.2.6 +coverage==7.3.2 +pytest==7.4.3 isort==5.12.0 -pytest==7.1.2 -pytest-cov==4.0.0 -tox==4.4.8 +pytest-cov==4.1.0 +tox==4.11.3 +ruff==0.1.3 From bce6ef2c4c4a921e756a289a97793e04292a048f Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:38:13 -0800 Subject: [PATCH 23/89] Update requirements-min.txt --- requirements-min.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements-min.txt b/requirements-min.txt index 0e8bde429..f6b765b0b 100644 --- a/requirements-min.txt +++ b/requirements-min.txt @@ -4,4 +4,3 @@ hdmf==3.12.2 numpy==1.18 pandas==1.1.5 python-dateutil==2.7.3 -setuptools From 418017dbfd21c4bce3eba096d1dc507331304bc7 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:38:37 -0800 Subject: [PATCH 24/89] Update requirements.txt --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ad5d748bd..836052317 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,3 @@ hdmf==3.12.2 numpy==1.26.1 pandas==2.1.2 python-dateutil==2.8.2 -setuptools==65.5.1 \ No newline at end of file From 867f794a74691e0a4029fe98215d4e7ed12adc94 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:39:36 -0800 Subject: [PATCH 25/89] Delete versioneer.py --- versioneer.py | 2194 ------------------------------------------------- 1 file changed, 2194 deletions(-) delete mode 100644 versioneer.py diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 223db1d9c..000000000 --- a/versioneer.py +++ /dev/null @@ -1,2194 +0,0 @@ - -# Version: 0.26 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -Versioneer provides two installation modes. The "classic" vendored mode installs -a copy of versioneer into your repository. The experimental build-time dependency mode -is intended to allow you to skip this step and simplify the process of upgrading. 
- -### Vendored mode - -* `pip install versioneer` to somewhere in your $PATH -* add a `[tool.versioneer]` section to your `pyproject.toml or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* run `versioneer install --vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -### Build-time dependency mode - -* `pip install versioneer` to somewhere in your $PATH -* add a `[tool.versioneer]` section to your `pyproject.toml or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* add `versioneer` to the `requires` key of the `build-system` table in - `pyproject.toml`: - ```toml - [build-system] - requires = ["setuptools", "versioneer"] - build-backend = "setuptools.build_meta" - ``` -* run `versioneer install --no-vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. 
This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". 
To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. 
- - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg` and `pyproject.toml`, if necessary, - to include any new configuration settings indicated by the release notes. - See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install --[no-]vendor` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from pathlib import Path -from typing import Callable, Dict -import functools -try: - import tomli - have_tomli = True -except ImportError: - have_tomli = False - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . 
- """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None - if pyproject_toml.exists() and have_tomli: - try: - with open(pyproject_toml, 'rb') as fobj: - pp = tomli.load(fobj) - section = pp['tool']['versioneer'] - except (tomli.TOMLDecodeError, KeyError): - pass - if not section: - parser = configparser.ConfigParser() - with open(setup_cfg) as cfg_file: - parser.read_file(cfg_file) - parser.get("versioneer", "VCS") # raise error if missing - - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = section['VCS'] - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""', None): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. 
-# Generated by versioneer-0.26 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. 
We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. 
- branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
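The loop that follows inverts versionfile_source to climb from _version.py back up to the source-tree root. A minimal sketch of that inversion, using the path configured for this project in the setup.cfg removed later in this series; the "/checkout" prefix is made up for illustration:

    import os

    versionfile_source = "src/pynwb/_version.py"   # value from [versioneer] in setup.cfg
    root = "/checkout/src/pynwb/_version.py"       # hypothetical location of __file__
    # one os.path.dirname() per path component walks back up to the project root
    for _ in versionfile_source.split("/"):
        root = os.path.dirname(root)
    print(root)  # /checkout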
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". 
- tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
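A quick, self-contained illustration of the digit heuristic and the sort order described above, with made-up ref names; note how lexicographic sorting puts the plain release ahead of its release candidate:

    import re

    # Illustrative refs as they might come out of an expanded %d keyword.
    refs = {"HEAD", "master", "2.0", "2.0rc1"}
    likely_tags = {r for r in refs if re.search(r"\d", r)}  # drops "HEAD" and "master"
    print(sorted(likely_tags))  # ['2.0', '2.0rc1'] -- the plain release is picked first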
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [versionfile_source] - if ipy: - files.append(ipy) - if "VERSIONEER_PEP518" not in globals(): - try: - my_path = __file__ - if my_path.endswith(".pyc") or my_path.endswith(".pyo"): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.26) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
- """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
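The get_versions() helper above tries each source in order (expanded git-archive keywords, a rewritten _version.py, 'git describe', then the parent directory name) and falls back to "0+unknown". A compressed, self-contained sketch of that fallback chain using stub sources; the function names and version string below are illustrative, not pynwb's actual values:

    # Each source either returns a version dict or raises NotThisMethod;
    # the first one that succeeds wins.
    class NotThisMethod(Exception):
        pass

    def from_keywords():
        raise NotThisMethod("keywords not expanded")      # not a git-archive tarball

    def from_version_file():
        raise NotThisMethod("no static version_json")     # _version.py not rewritten

    def from_git_describe():
        return {"version": "2.6.0+3.gabc1234", "dirty": False}  # illustrative value

    def get_versions_sketch():
        for source in (from_keywords, from_version_file, from_git_describe):
            try:
                return source()
            except NotThisMethod:
                continue
        return {"version": "0+unknown", "error": "unable to compute version"}

    print(get_versions_sketch()["version"])  # 2.6.0+3.gabc1234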
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to setuptools - from setuptools import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # pip install -e . and setuptool/editable_wheel will invoke build_py - # but the build_py command is not expected to copy any files. - - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - else: - from setuptools.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - if getattr(self, "editable_mode", False): - # During editable installs `.py` and data files are - # not copied to build_lib - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - else: - from setuptools.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - if not os.path.exists(target_versionfile): - print(f"Warning: {target_versionfile} does not exist, skipping " - "version update. This can happen if you are running build_ext " - "without first running build_py.") - return - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
- from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? - try: - from py2exe.setuptools_buildexe import py2exe as _py2exe - except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # sdist farms its file list building out to egg_info - if 'egg_info' in cmds: - _sdist = cmds['egg_info'] - else: - from setuptools.command.egg_info import egg_info as _egg_info - - class cmd_egg_info(_egg_info): - def find_sources(self): - # egg_info.find_sources builds the manifest list and writes it - # in one shot - super().find_sources() - - # Modify the filelist and normalize it - root = get_root() - cfg = get_config_from_root(root) - self.filelist.append('versioneer.py') - if cfg.versionfile_source: - # There are rare cases where versionfile_source might not be - # included by default, so we must be explicit - self.filelist.append(cfg.versionfile_source) - self.filelist.sort() - self.filelist.remove_duplicates() - - # The write method is hidden in the manifest_maker instance that - # generated the filelist and was thrown away - # We will instead replicate their final normalization (to unicode, - # and POSIX-style paths) - from setuptools import unicode_utils - normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') - for f in self.filelist.files] - - manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') - with open(manifest_filename, 'w') as fobj: - fobj.write('\n'.join(normalized)) - - cmds['egg_info'] = cmd_egg_info - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - else: - from setuptools.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - 
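These command overrides all funnel through write_to_version_file(), which swaps the live git lookup in _version.py for a static JSON blob wrapped in the SHORT_VERSION_PY template. Roughly, with an illustrative version dict standing in for the real get_versions() output:

    import json

    # Illustrative values; the real dict comes from get_versions().
    versions = {"version": "2.6.0", "full-revisionid": "0" * 40,
                "dirty": False, "error": None, "date": None}
    blob = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": "))
    contents = "version_json = '''\n%s\n''' # END VERSION_JSON\n" % blob
    print(contents)  # body of the rewritten _version.py, minus the header comment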
- def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
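Concretely, the line do_vcs_install() ensures is present in .gitattributes is just the versionfile path plus the export-subst attribute (path taken from the [versioneer] section of the setup.cfg deleted later in this series); with that attribute set, 'git archive' expands the $Format$ keywords at the top of _version.py:

    versionfile_source = "src/pynwb/_version.py"
    print(f"{versionfile_source} export-subst")  # the line appended to .gitattributes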
- do_vcs_install(cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -def setup_command(): - """Set up Versioneer and exit with appropriate error code.""" - errors = do_setup() - errors += scan_setup_py() - sys.exit(1 if errors else 0) - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - setup_command() From 336252bcbd115e506b1476c36a80a7d04df98baa Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:39:44 -0800 Subject: [PATCH 26/89] Delete setup.cfg --- setup.cfg | 42 ------------------------------------------ 1 file changed, 42 deletions(-) delete mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index d44fcc2b1..000000000 --- a/setup.cfg +++ /dev/null @@ -1,42 +0,0 @@ -[versioneer] -VCS = git -versionfile_source = src/pynwb/_version.py -versionfile_build = pynwb/_version.py -tag_prefix = '' -style = pep440-pre - -[flake8] -max-line-length = 120 -max-complexity = 17 -exclude = - .git, - .tox, - __pycache__, - build/, - dist/, - src/pynwb/nwb-schema/ - docs/_build/, - docs/source/conf.py - docs/source/tutorials/ - versioneer.py, - src/pynwb/_version.py - src/pynwb/_due.py -per-file-ignores = - docs/gallery/*:E402,E501,T201 - docs/source/tutorials/*:E402,T201 - src/pynwb/io/__init__.py:F401 - src/pynwb/legacy/io/__init__.py:F401 - tests/integration/__init__.py:F401 - src/pynwb/testing/__init__.py:F401 - src/pynwb/validate.py:T201 - tests/read_dandi/read_first_nwb_asset.py:T201 - setup.py:T201 - test.py:T201 - scripts/*:T201 -extend-ignore = E203 - -[metadata] -description_file = README.rst - -[isort] -profile = black From e4b3cbe3c70d7ee34fcdc56f0e678d9c275b83c8 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:39:53 -0800 Subject: [PATCH 27/89] Delete setup.py --- setup.py | 81 -------------------------------------------------------- 1 file changed, 81 deletions(-) delete mode 100755 setup.py diff --git a/setup.py b/setup.py deleted file mode 100755 index 2a9ecb19e..000000000 --- a/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- - -import sys - -from setuptools import setup, find_packages - -# Some Python installations don't add the current directory to path. 
-if '' not in sys.path: - sys.path.insert(0, '') - -import versioneer - -with open('README.rst', 'r') as fp: - readme = fp.read() - -pkgs = find_packages('src', exclude=['data']) -print('found these packages:', pkgs) - -schema_dir = 'nwb-schema/core' - -reqs = [ - 'h5py>=2.10', - 'hdmf>=3.12.2', - 'numpy>=1.16', - 'pandas>=1.1.5', - 'python-dateutil>=2.7.3', - 'setuptools' -] - -print(reqs) - -setup_args = { - 'name': 'pynwb', - 'version': versioneer.get_version(), - 'cmdclass': versioneer.get_cmdclass(), - 'description': 'Package for working with Neurodata stored in the NWB format', - 'long_description': readme, - 'long_description_content_type': 'text/x-rst; charset=UTF-8', - 'author': 'Andrew Tritt', - 'author_email': 'ajtritt@lbl.gov', - 'url': 'https://github.com/NeurodataWithoutBorders/pynwb', - 'license': "BSD", - 'install_requires': reqs, - 'packages': pkgs, - 'package_dir': {'': 'src'}, - 'package_data': {'pynwb': ["%s/*.yaml" % schema_dir, "%s/*.json" % schema_dir]}, - 'python_requires': '>=3.8', - 'classifiers': [ - "Programming Language :: Python", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "License :: OSI Approved :: BSD License", - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS", - "Operating System :: Unix", - "Topic :: Scientific/Engineering :: Medical Science Apps." - ], - 'keywords': 'Neuroscience ' - 'python ' - 'HDF ' - 'HDF5 ' - 'cross-platform ' - 'open-data ' - 'data-format ' - 'open-source ' - 'open-science ' - 'reproducible-research ' - 'PyNWB ' - 'NWB ' - 'NWB:N ' - 'NeurodataWithoutBorders', - 'zip_safe': False -} - -if __name__ == '__main__': - setup(**setup_args) From 5fabcca364ec55476724debd76ccc045edc0b302 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:40:01 -0800 Subject: [PATCH 28/89] Delete test.py --- test.py | 397 -------------------------------------------------------- 1 file changed, 397 deletions(-) delete mode 100755 test.py diff --git a/test.py b/test.py deleted file mode 100755 index 16191ae3f..000000000 --- a/test.py +++ /dev/null @@ -1,397 +0,0 @@ -#!/usr/bin/env python -import warnings -import re -import argparse -import glob -import inspect -import logging -import os.path -import os -from subprocess import run, PIPE, STDOUT -import sys -import traceback -import unittest - -flags = { - 'pynwb': 2, - 'integration': 3, - 'example': 4, - 'backwards': 5, - 'validate-examples': 6, - 'ros3': 7, - 'example-ros3': 8, - 'validation-module': 9 -} - -TOTAL = 0 -FAILURES = 0 -ERRORS = 0 - - -class SuccessRecordingResult(unittest.TextTestResult): - '''A unittest test result class that stores successful test cases as well - as failures and skips. 
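A minimal, self-contained sketch of the pattern this class implements: hook addSuccess() on a TextTestResult subclass and hand it to the runner via resultclass (ExampleTest is a stand-in test case, not part of the pynwb suite):

    import unittest

    class RecordingResult(unittest.TextTestResult):
        """Record passing tests in addition to failures and errors."""
        def addSuccess(self, test):
            super().addSuccess(test)
            if not hasattr(self, "successes"):
                self.successes = []
            self.successes.append(test)

    class ExampleTest(unittest.TestCase):
        def test_passes(self):
            self.assertTrue(True)

    runner = unittest.TextTestRunner(resultclass=RecordingResult)
    result = runner.run(unittest.defaultTestLoader.loadTestsFromTestCase(ExampleTest))
    print(len(result.successes))  # 1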
- ''' - - def addSuccess(self, test): - if not hasattr(self, 'successes'): - self.successes = [test] - else: - self.successes.append(test) - - def get_all_cases_run(self): - '''Return a list of each test case which failed or succeeded - ''' - cases = [] - - if hasattr(self, 'successes'): - cases.extend(self.successes) - cases.extend([failure[0] for failure in self.failures]) - - return cases - - -def run_test_suite(directory, description="", verbose=True): - global TOTAL, FAILURES, ERRORS - logging.info("running %s" % description) - directory = os.path.join(os.path.dirname(__file__), directory) - runner = unittest.TextTestRunner(verbosity=verbose, resultclass=SuccessRecordingResult) - # set top_level_dir below to prevent import name clashes between - # tests/unit/test_base.py and tests/integration/hdf5/test_base.py - test_result = runner.run(unittest.TestLoader().discover(directory, top_level_dir='tests')) - - TOTAL += test_result.testsRun - FAILURES += len(test_result.failures) - ERRORS += len(test_result.errors) - - return test_result - - -def _import_from_file(script): - import imp - return imp.load_source(os.path.basename(script), script) - - -warning_re = re.compile("Parent module '[a-zA-Z0-9]+' not found while handling absolute import") - - -ros3_examples = [ - os.path.join('general', 'read_basics.py'), - os.path.join('advanced_io', 'streaming.py'), -] - -allensdk_examples = [ - os.path.join('domain', 'brain_observatory.py'), # TODO create separate workflow for this -] - - -def run_example_tests(): - """Run the Sphinx gallery example files, excluding ROS3-dependent ones, to check for errors.""" - logging.info('running example tests') - examples_scripts = list() - for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), "docs", "gallery")): - for f in files: - if f.endswith(".py"): - name_with_parent_dir = os.path.join(os.path.basename(root), f) - if name_with_parent_dir in ros3_examples or name_with_parent_dir in allensdk_examples: - logging.info("Skipping %s" % name_with_parent_dir) - continue - examples_scripts.append(os.path.join(root, f)) - - __run_example_tests_helper(examples_scripts) - - -def run_example_ros3_tests(): - """Run the Sphinx gallery example files that depend on ROS3 to check for errors.""" - logging.info('running example ros3 tests') - examples_scripts = list() - for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), "docs", "gallery")): - for f in files: - if f.endswith(".py"): - name_with_parent_dir = os.path.join(os.path.basename(root), f) - if name_with_parent_dir not in ros3_examples: - logging.info("Skipping %s" % name_with_parent_dir) - continue - examples_scripts.append(os.path.join(root, f)) - - __run_example_tests_helper(examples_scripts) - - -def __run_example_tests_helper(examples_scripts): - global TOTAL, FAILURES, ERRORS - TOTAL += len(examples_scripts) - for script in examples_scripts: - try: - logging.info("Executing %s" % script) - ws = list() - with warnings.catch_warnings(record=True) as tmp: - _import_from_file(script) - for w in tmp: # ignore RunTimeWarnings about importing - if isinstance(w.message, RuntimeWarning) and not warning_re.match(str(w.message)): - ws.append(w) - for w in ws: - warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line) - except Exception: - print(traceback.format_exc()) - FAILURES += 1 - ERRORS += 1 - - -def validate_nwbs(): - global TOTAL, FAILURES, ERRORS - logging.info('running validation tests on NWB files') - examples_nwbs = glob.glob('*.nwb') - - import pynwb - 
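Run by hand against a single file, the command-line checks driven by the loop below look roughly like this; the file name is just one of the example outputs listed further down, "core" is used as an illustrative namespace, and pynwb is assumed to be installed:

    from subprocess import run, PIPE, STDOUT

    nwb = "basics_tutorial.nwb"  # illustrative; any NWB file path works
    # List the namespaces cached in the file, then validate against one of them.
    run(["python", "-m", "pynwb.validate", "--list-namespaces", nwb],
        stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30)
    run(["python", "-m", "pynwb.validate", "--ns", "core", nwb],
        stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30)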
- for nwb in examples_nwbs: - try: - logging.info("Validating file %s" % nwb) - - ws = list() - with warnings.catch_warnings(record=True) as tmp: - logging.info("Validating with pynwb.validate method.") - with pynwb.NWBHDF5IO(nwb, mode='r') as io: - errors = pynwb.validate(io) - TOTAL += 1 - - if errors: - FAILURES += 1 - ERRORS += 1 - for err in errors: - print("Error: %s" % err) - - def get_namespaces(nwbfile): - comp = run(["python", "-m", "pynwb.validate", - "--list-namespaces", nwbfile], - stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30) - - if comp.returncode != 0: - return [] - - return comp.stdout.split() - - namespaces = get_namespaces(nwb) - - if len(namespaces) == 0: - FAILURES += 1 - ERRORS += 1 - - cmds = [] - cmds += [["python", "-m", "pynwb.validate", nwb]] - cmds += [["python", "-m", "pynwb.validate", "--no-cached-namespace", nwb]] - - for ns in namespaces: - # for some reason, this logging command is necessary to correctly printing the namespace in the - # next logging command - logging.info("Namespace found: %s" % ns) - cmds += [["python", "-m", "pynwb.validate", "--ns", ns, nwb]] - - for cmd in cmds: - logging.info("Validating with \"%s\"." % (" ".join(cmd[:-1]))) - comp = run(cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30) - TOTAL += 1 - - if comp.returncode != 0: - FAILURES += 1 - ERRORS += 1 - print("Error: %s" % comp.stdout) - - for w in tmp: # ignore RunTimeWarnings about importing - if isinstance(w.message, RuntimeWarning) and not warning_re.match(str(w.message)): - ws.append(w) - for w in ws: - warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line) - except Exception: - print(traceback.format_exc()) - FAILURES += 1 - ERRORS += 1 - - -def run_integration_tests(verbose=True): - pynwb_test_result = run_test_suite("tests/integration/hdf5", "integration tests", verbose=verbose) - test_cases = pynwb_test_result.get_all_cases_run() - - import pynwb - type_map = pynwb.get_type_map() - - tested_containers = {} - for test_case in test_cases: - if not hasattr(test_case, 'container'): - continue - container_class = test_case.container.__class__ - - if container_class not in tested_containers: - tested_containers[container_class] = [test_case._testMethodName] - else: - tested_containers[container_class].append(test_case._testMethodName) - - count_missing = 0 - for container_class in type_map.get_container_classes('core'): - if container_class not in tested_containers: - count_missing += 1 - if verbose > 1: - logging.info('%s missing test case; should define in %s' % (container_class, - inspect.getfile(container_class))) - - if count_missing > 0: - logging.info('%d classes missing integration tests in ui_write' % count_missing) - else: - logging.info('all classes have integration tests') - - run_test_suite("tests/integration/utils", "integration utils tests", verbose=verbose) - - -def clean_up_tests(): - # remove files generated from running example files - files_to_remove = [ - "advanced_io_example.nwb", - "basic_alternative_custom_write.nwb", - "basic_iterwrite_example.nwb", - "basic_sparse_iterwrite_*.nwb", - "basic_sparse_iterwrite_*.npy", - "basics_tutorial.nwb", - "behavioral_tutorial.nwb", - "brain_observatory.nwb", - "cache_spec_example.nwb", - "ecephys_tutorial.nwb", - "ecog.extensions.yaml", - "ecog.namespace.yaml", - "ex_test_icephys_file.nwb", - "example_timeintervals_file.nwb", - "exported_nwbfile.nwb", - "external_linkcontainer_example.nwb", - "external_linkdataset_example.nwb", - "external1_example.nwb", - 
"external2_example.nwb", - "icephys_example.nwb", - "icephys_pandas_testfile.nwb", - "images_tutorial.nwb", - "manifest.json", - "mylab.extensions.yaml", - "mylab.namespace.yaml", - "nwbfile.nwb", - "ophys_tutorial.nwb", - "processed_data.nwb", - "raw_data.nwb", - "scratch_analysis.nwb", - "test_cortical_surface.nwb", - "test_icephys_file.nwb", - "test_multicontainerinterface.extensions.yaml", - "test_multicontainerinterface.namespace.yaml", - "test_multicontainerinterface.nwb", - ] - for f in files_to_remove: - for name in glob.glob(f): - if os.path.exists(name): - os.remove(name) - - -def main(): - # setup and parse arguments - parser = argparse.ArgumentParser('python test.py [options]') - parser.set_defaults(verbosity=1, suites=[]) - parser.add_argument('-v', '--verbose', const=2, dest='verbosity', action='store_const', help='run in verbose mode') - parser.add_argument('-q', '--quiet', const=0, dest='verbosity', action='store_const', help='run disabling output') - parser.add_argument('-p', '--pynwb', action='append_const', const=flags['pynwb'], dest='suites', - help='run unit tests for pynwb package') - parser.add_argument('-i', '--integration', action='append_const', const=flags['integration'], dest='suites', - help='run integration tests') - parser.add_argument('-e', '--example', action='append_const', const=flags['example'], dest='suites', - help='run example tests') - parser.add_argument('-f', '--example-ros3', action='append_const', const=flags['example-ros3'], dest='suites', - help='run example tests with ros3 streaming') - parser.add_argument('-b', '--backwards', action='append_const', const=flags['backwards'], dest='suites', - help='run backwards compatibility tests') - parser.add_argument('-w', '--validate-examples', action='append_const', const=flags['validate-examples'], - dest='suites', help='run example tests and validation tests on example NWB files') - parser.add_argument('-r', '--ros3', action='append_const', const=flags['ros3'], dest='suites', - help='run ros3 streaming tests') - parser.add_argument('-x', '--validation-module', action='append_const', const=flags['validation-module'], - dest='suites', help='run tests on pynwb.validate') - args = parser.parse_args() - if not args.suites: - args.suites = list(flags.values()) - # remove from test suites run by default - args.suites.pop(args.suites.index(flags['example'])) - args.suites.pop(args.suites.index(flags['example-ros3'])) - args.suites.pop(args.suites.index(flags['validate-examples'])) - args.suites.pop(args.suites.index(flags['ros3'])) - args.suites.pop(args.suites.index(flags['validation-module'])) - - # set up logger - root = logging.getLogger() - root.setLevel(logging.INFO) - ch = logging.StreamHandler(sys.stdout) - ch.setLevel(logging.INFO) - formatter = logging.Formatter('======================================================================\n' - '%(asctime)s - %(levelname)s - %(message)s') - ch.setFormatter(formatter) - root.addHandler(ch) - - warnings.simplefilter('always') - - warnings.filterwarnings("ignore", category=ImportWarning, module='importlib._bootstrap', - message=("can't resolve package from __spec__ or __package__, falling back on __name__ " - "and __path__")) - - # Run unit tests for pynwb package - if flags['pynwb'] in args.suites: - run_test_suite("tests/unit", "pynwb unit tests", verbose=args.verbosity) - - # Run example tests - is_run_example_tests = False - if flags['example'] in args.suites or flags['validate-examples'] in args.suites: - run_example_tests() - is_run_example_tests = True 
- - # Run example tests with ros3 streaming examples - # NOTE this requires h5py to be built with ROS3 support and the dandi package to be installed - # this is most easily done by creating a conda environment using environment-ros3.yml - if flags['example-ros3'] in args.suites: - run_example_ros3_tests() - - # Run validation tests on the example NWB files generated above - if flags['validate-examples'] in args.suites: - validate_nwbs() - - # Run integration tests - if flags['integration'] in args.suites: - run_integration_tests(verbose=args.verbosity) - - # Run validation module tests, requires coverage to be installed - if flags['validation-module'] in args.suites: - run_test_suite("tests/validation", "validation tests", verbose=args.verbosity) - - # Run backwards compatibility tests - if flags['backwards'] in args.suites: - run_test_suite("tests/back_compat", "pynwb backwards compatibility tests", verbose=args.verbosity) - - # Run ros3 streaming tests - if flags['ros3'] in args.suites: - run_test_suite("tests/integration/ros3", "pynwb ros3 streaming tests", verbose=args.verbosity) - - # Delete files generated from running example tests above - if is_run_example_tests: - clean_up_tests() - - final_message = 'Ran %s tests' % TOTAL - exitcode = 0 - if ERRORS > 0 or FAILURES > 0: - exitcode = 1 - _list = list() - if ERRORS > 0: - _list.append('errors=%d' % ERRORS) - if FAILURES > 0: - _list.append('failures=%d' % FAILURES) - final_message = '%s - FAILED (%s)' % (final_message, ','.join(_list)) - else: - final_message = '%s - OK' % final_message - - logging.info(final_message) - - return exitcode - - -if __name__ == "__main__": - sys.exit(main()) From 632d047522c178399da4fd26f05be457594a5934 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:40:21 -0800 Subject: [PATCH 29/89] Delete .codecov.yml --- .codecov.yml | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 .codecov.yml diff --git a/.codecov.yml b/.codecov.yml deleted file mode 100644 index 2d9d3e61b..000000000 --- a/.codecov.yml +++ /dev/null @@ -1,12 +0,0 @@ -coverage: - status: - project: - default: - # Don't allow overall project coverage to be dropped more than - # 2% - threshold: 2 - patch: - default: - # 75% of the changed code must be covered by tests - threshold: 25 - only_pulls: true From 313ff22e488d466e7ff0e615fe6193a7faaa8b94 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:40:40 -0800 Subject: [PATCH 30/89] Delete .codespellrc --- .codespellrc | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 .codespellrc diff --git a/.codespellrc b/.codespellrc deleted file mode 100644 index a38689dfe..000000000 --- a/.codespellrc +++ /dev/null @@ -1,8 +0,0 @@ -[codespell] -# in principle .ipynb can be corrected -- a good number of typos there -# nwb-schema -- excluding since submodule, should have its own fixes/checks -skip = .git,*.pdf,*.svg,venvs,env,nwb-schema -ignore-regex = ^\s*"image/\S+": ".* -# it is optin in a url -# potatos - demanded to be left alone, autogenerated -ignore-words-list = optin,potatos From de2c164c748dbdb58d20cc3e1d260ec2854445bd Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:40:56 -0800 Subject: [PATCH 31/89] Delete .coveragerc --- .coveragerc | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index def839969..000000000 --- a/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True -source = src/ -omit = - 
src/pynwb/_version.py - src/pynwb/_due.py - src/pynwb/testing/* - src/pynwb/legacy/* - -[report] -exclude_lines = - pragma: no cover - @abstract From 72d3a5ded4ffe55657925951fc30946359b30dbd Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:41:36 -0800 Subject: [PATCH 32/89] Create .codecov.yml --- .codecov.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .codecov.yml diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 000000000..2d9d3e61b --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,12 @@ +coverage: + status: + project: + default: + # Don't allow overall project coverage to be dropped more than + # 2% + threshold: 2 + patch: + default: + # 75% of the changed code must be covered by tests + threshold: 25 + only_pulls: true From c5b37145766c0ca6dda8e8b380d2a4788d9eb74f Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 11:50:11 -0800 Subject: [PATCH 33/89] Update tox.ini --- tox.ini | 71 +++++++++++++-------------------------------------------- 1 file changed, 16 insertions(+), 55 deletions(-) diff --git a/tox.ini b/tox.ini index 10b1e0df4..cf5afde76 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py38, py39, py310, py311 +envlist = py38, py39, py310, py311, py 312 requires = pip >= 22.0 [testenv] @@ -22,43 +22,15 @@ deps = commands = python -m pip check # Check for conflicting packages python -m pip list - python test.py -v + pytest -v -# Env to create coverage report locally -[testenv:localcoverage] -basepython = python3.11 -commands = - python -m coverage run test.py --pynwb - coverage html -d tests/coverage/htmlcov - -# Test with python 3.11; pinned dev and optional reqs -[testenv:py311-optional] -basepython = python3.11 -install_command = - python -m pip install {opts} {packages} -deps = - -rrequirements-dev.txt - ; -rrequirements-opt.txt -commands = {[testenv]commands} - -# Test with python 3.11; pinned dev and optional reqs; upgraded run reqs -[testenv:py311-upgraded] -basepython = python3.11 +# Test with python 3.12; pinned dev reqs; upgraded run reqs +[testenv:py312-upgraded] +basepython = python3.12 install_command = python -m pip install -U {opts} {packages} deps = -rrequirements-dev.txt - ; -rrequirements-opt.txt -commands = {[testenv]commands} - -# Test with python 3.11; pinned dev and optional reqs; upgraded, pre-release run reqs -[testenv:py311-prerelease] -basepython = python3.11 -install_command = - python -m pip install -U --pre {opts} {packages} -deps = - -rrequirements-dev.txt - ; -rrequirements-opt.txt commands = {[testenv]commands} # Test with python 3.8; pinned dev reqs; minimum run reqs @@ -91,29 +63,18 @@ commands = {[testenv:build]commands} basepython = python3.11 commands = {[testenv:build]commands} -[testenv:build-py311-optional] -basepython = python3.11 +[testenv:build-py312-optional] +basepython = python3.12 deps = -rrequirements-dev.txt - ; -rrequirements-opt.txt commands = {[testenv:build]commands} -[testenv:build-py311-upgraded] -basepython = python3.11 +[testenv:build-py312-upgraded] +basepython = python3.12 install_command = python -m pip install -U {opts} {packages} deps = -rrequirements-dev.txt - ; -rrequirements-opt.txt -commands = {[testenv:build]commands} - -[testenv:build-py311-prerelease] -basepython = python3.11 -install_command = - python -m pip install -U --pre {opts} {packages} -deps = - -rrequirements-dev.txt - ; -rrequirements-opt.txt commands = {[testenv:build]commands} [testenv:build-py38-minimum] @@ 
-162,9 +123,9 @@ basepython = python3.11 deps = {[testenv:gallery]deps} commands = {[testenv:gallery]commands} -# Test with python 3.11; pinned dev, doc, and optional reqs; upgraded run reqs -[testenv:gallery-py311-upgraded] -basepython = python3.11 +# Test with python 3.12; pinned dev, doc, and optional reqs; upgraded run reqs +[testenv:gallery-py312-upgraded] +basepython = python3.12 deps = -rrequirements-dev.txt commands = @@ -174,9 +135,9 @@ commands = python -m pip list python test.py --example -# Test with python 3.11; pinned dev, doc, and optional reqs; pre-release run reqs -[testenv:gallery-py311-prerelease] -basepython = python3.11 +# Test with python 3.12; pinned dev, doc, and optional reqs; pre-release run reqs +[testenv:gallery-py312-prerelease] +basepython = python3.12 deps = -rrequirements-dev.txt commands = @@ -191,4 +152,4 @@ commands = basepython = python3.8 deps = -rrequirements-min.txt -commands = {[testenv:gallery]commands} \ No newline at end of file +commands = {[testenv:gallery]commands} From 08c269ca6e59c0cb8895a276e943ec26f80867b2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Sun, 3 Mar 2024 13:46:43 -0800 Subject: [PATCH 34/89] Update tox.ini --- tox.ini | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tox.ini b/tox.ini index cf5afde76..f836bf0a1 100644 --- a/tox.ini +++ b/tox.ini @@ -63,6 +63,10 @@ commands = {[testenv:build]commands} basepython = python3.11 commands = {[testenv:build]commands} +[testenv:build-py312] +basepython = python3.12 +commands = {[testenv:build]commands} + [testenv:build-py312-optional] basepython = python3.12 deps = From 6c2140df787cf48974ea662f66a7fccb392aa861 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 13:39:07 -0800 Subject: [PATCH 35/89] Update release.md --- .github/PULL_REQUEST_TEMPLATE/release.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE/release.md b/.github/PULL_REQUEST_TEMPLATE/release.md index 9fe11241f..b7154c83c 100644 --- a/.github/PULL_REQUEST_TEMPLATE/release.md +++ b/.github/PULL_REQUEST_TEMPLATE/release.md @@ -10,7 +10,7 @@ Prepare for release of PyNWB [version] - [ ] Update `src/pynwb/nwb-schema` submodule as needed. 
Check the version number and commit SHA manually - [ ] Update changelog (set release date) in `CHANGELOG.md` and any other docs as needed - [ ] Run tests locally including gallery, validation, and streaming tests, and inspect all warnings and outputs - (`python test.py`) + (`python test.py -v -p -i -b -w -r > out.txt 2>&1`) - [ ] Test docs locally (`make clean`, `make html`) - [ ] Push changes to this PR and make sure all PRs to be included in this release have been merged - [ ] Check that the readthedocs build for this PR succeeds (build latest to pull the new branch, then activate and From 9f502ed7ad26abf52ec0358c6d2d42175c78858a Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 13:39:50 -0800 Subject: [PATCH 36/89] Update MANIFEST.in --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 18d92c68e..dd0fdadda 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,5 @@ include license.txt Legal.txt src/pynwb/_due.py include requirements.txt requirements-dev.txt requirements-doc.txt requirements-min.txt environment-ros3.yml -include tox.ini +include test.py tox.ini graft tests From 579b1a49c735c094050965707e598975c74d136b Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 13:43:30 -0800 Subject: [PATCH 37/89] Update install_developers.rst --- docs/source/install_developers.rst | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/docs/source/install_developers.rst b/docs/source/install_developers.rst index 15816397a..8d2743979 100644 --- a/docs/source/install_developers.rst +++ b/docs/source/install_developers.rst @@ -90,13 +90,19 @@ editable mode. Run tests --------- -You can run the full test suite by running: +For running the tests, it is required to install the development requirements. Again, first activate your +virtualenv or conda environment. -.. code:: bash - - pytest +.. code:: + $ git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git + $ cd pynwb + $ pip install -r requirements.txt -r requirements-dev.txt + $ pip install -e . + $ tox -This will run all the tests and compute the test coverage. The coverage report can be found in ``/htmlcov``. +For debugging it can be useful to keep the intermediate NWB files created by +the tests. To keep these files create the environment variables +``CLEAN_NWB``/``CLEAN_HDMF`` and set them to ``1``. 
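As a minimal sketch of the debugging step described above — assuming a bash-like shell, and taking the variable names ``CLEAN_NWB``/``CLEAN_HDMF`` and the value ``1`` directly from that paragraph rather than from any verified behavior — the variables might be exported before invoking tox:

    $ export CLEAN_NWB=1   # assumption: keeps intermediate NWB files produced by the tests, per the note above
    $ export CLEAN_HDMF=1  # assumption: same for HDMF-generated intermediate files
    $ tox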
FAQ From f716919693c9260c44cb63809bafbebad467029c Mon Sep 17 00:00:00 2001 From: mavaylon1 Date: Wed, 6 Mar 2024 13:46:29 -0800 Subject: [PATCH 38/89] test --- src/pynwb/nwb-schema | 2 +- test.py | 397 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 398 insertions(+), 1 deletion(-) create mode 100644 test.py diff --git a/src/pynwb/nwb-schema b/src/pynwb/nwb-schema index b4f8838cb..d65d42257 160000 --- a/src/pynwb/nwb-schema +++ b/src/pynwb/nwb-schema @@ -1 +1 @@ -Subproject commit b4f8838cbfbb7f8a117bd7e0aad19133d26868b4 +Subproject commit d65d42257003543c569ea7ac0cd6d7aee01c88d6 diff --git a/test.py b/test.py new file mode 100644 index 000000000..16191ae3f --- /dev/null +++ b/test.py @@ -0,0 +1,397 @@ +#!/usr/bin/env python +import warnings +import re +import argparse +import glob +import inspect +import logging +import os.path +import os +from subprocess import run, PIPE, STDOUT +import sys +import traceback +import unittest + +flags = { + 'pynwb': 2, + 'integration': 3, + 'example': 4, + 'backwards': 5, + 'validate-examples': 6, + 'ros3': 7, + 'example-ros3': 8, + 'validation-module': 9 +} + +TOTAL = 0 +FAILURES = 0 +ERRORS = 0 + + +class SuccessRecordingResult(unittest.TextTestResult): + '''A unittest test result class that stores successful test cases as well + as failures and skips. + ''' + + def addSuccess(self, test): + if not hasattr(self, 'successes'): + self.successes = [test] + else: + self.successes.append(test) + + def get_all_cases_run(self): + '''Return a list of each test case which failed or succeeded + ''' + cases = [] + + if hasattr(self, 'successes'): + cases.extend(self.successes) + cases.extend([failure[0] for failure in self.failures]) + + return cases + + +def run_test_suite(directory, description="", verbose=True): + global TOTAL, FAILURES, ERRORS + logging.info("running %s" % description) + directory = os.path.join(os.path.dirname(__file__), directory) + runner = unittest.TextTestRunner(verbosity=verbose, resultclass=SuccessRecordingResult) + # set top_level_dir below to prevent import name clashes between + # tests/unit/test_base.py and tests/integration/hdf5/test_base.py + test_result = runner.run(unittest.TestLoader().discover(directory, top_level_dir='tests')) + + TOTAL += test_result.testsRun + FAILURES += len(test_result.failures) + ERRORS += len(test_result.errors) + + return test_result + + +def _import_from_file(script): + import imp + return imp.load_source(os.path.basename(script), script) + + +warning_re = re.compile("Parent module '[a-zA-Z0-9]+' not found while handling absolute import") + + +ros3_examples = [ + os.path.join('general', 'read_basics.py'), + os.path.join('advanced_io', 'streaming.py'), +] + +allensdk_examples = [ + os.path.join('domain', 'brain_observatory.py'), # TODO create separate workflow for this +] + + +def run_example_tests(): + """Run the Sphinx gallery example files, excluding ROS3-dependent ones, to check for errors.""" + logging.info('running example tests') + examples_scripts = list() + for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), "docs", "gallery")): + for f in files: + if f.endswith(".py"): + name_with_parent_dir = os.path.join(os.path.basename(root), f) + if name_with_parent_dir in ros3_examples or name_with_parent_dir in allensdk_examples: + logging.info("Skipping %s" % name_with_parent_dir) + continue + examples_scripts.append(os.path.join(root, f)) + + __run_example_tests_helper(examples_scripts) + + +def run_example_ros3_tests(): + """Run the Sphinx gallery example files that 
depend on ROS3 to check for errors.""" + logging.info('running example ros3 tests') + examples_scripts = list() + for root, dirs, files in os.walk(os.path.join(os.path.dirname(__file__), "docs", "gallery")): + for f in files: + if f.endswith(".py"): + name_with_parent_dir = os.path.join(os.path.basename(root), f) + if name_with_parent_dir not in ros3_examples: + logging.info("Skipping %s" % name_with_parent_dir) + continue + examples_scripts.append(os.path.join(root, f)) + + __run_example_tests_helper(examples_scripts) + + +def __run_example_tests_helper(examples_scripts): + global TOTAL, FAILURES, ERRORS + TOTAL += len(examples_scripts) + for script in examples_scripts: + try: + logging.info("Executing %s" % script) + ws = list() + with warnings.catch_warnings(record=True) as tmp: + _import_from_file(script) + for w in tmp: # ignore RunTimeWarnings about importing + if isinstance(w.message, RuntimeWarning) and not warning_re.match(str(w.message)): + ws.append(w) + for w in ws: + warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line) + except Exception: + print(traceback.format_exc()) + FAILURES += 1 + ERRORS += 1 + + +def validate_nwbs(): + global TOTAL, FAILURES, ERRORS + logging.info('running validation tests on NWB files') + examples_nwbs = glob.glob('*.nwb') + + import pynwb + + for nwb in examples_nwbs: + try: + logging.info("Validating file %s" % nwb) + + ws = list() + with warnings.catch_warnings(record=True) as tmp: + logging.info("Validating with pynwb.validate method.") + with pynwb.NWBHDF5IO(nwb, mode='r') as io: + errors = pynwb.validate(io) + TOTAL += 1 + + if errors: + FAILURES += 1 + ERRORS += 1 + for err in errors: + print("Error: %s" % err) + + def get_namespaces(nwbfile): + comp = run(["python", "-m", "pynwb.validate", + "--list-namespaces", nwbfile], + stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30) + + if comp.returncode != 0: + return [] + + return comp.stdout.split() + + namespaces = get_namespaces(nwb) + + if len(namespaces) == 0: + FAILURES += 1 + ERRORS += 1 + + cmds = [] + cmds += [["python", "-m", "pynwb.validate", nwb]] + cmds += [["python", "-m", "pynwb.validate", "--no-cached-namespace", nwb]] + + for ns in namespaces: + # for some reason, this logging command is necessary to correctly printing the namespace in the + # next logging command + logging.info("Namespace found: %s" % ns) + cmds += [["python", "-m", "pynwb.validate", "--ns", ns, nwb]] + + for cmd in cmds: + logging.info("Validating with \"%s\"." 
% (" ".join(cmd[:-1]))) + comp = run(cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True, timeout=30) + TOTAL += 1 + + if comp.returncode != 0: + FAILURES += 1 + ERRORS += 1 + print("Error: %s" % comp.stdout) + + for w in tmp: # ignore RunTimeWarnings about importing + if isinstance(w.message, RuntimeWarning) and not warning_re.match(str(w.message)): + ws.append(w) + for w in ws: + warnings.showwarning(w.message, w.category, w.filename, w.lineno, w.line) + except Exception: + print(traceback.format_exc()) + FAILURES += 1 + ERRORS += 1 + + +def run_integration_tests(verbose=True): + pynwb_test_result = run_test_suite("tests/integration/hdf5", "integration tests", verbose=verbose) + test_cases = pynwb_test_result.get_all_cases_run() + + import pynwb + type_map = pynwb.get_type_map() + + tested_containers = {} + for test_case in test_cases: + if not hasattr(test_case, 'container'): + continue + container_class = test_case.container.__class__ + + if container_class not in tested_containers: + tested_containers[container_class] = [test_case._testMethodName] + else: + tested_containers[container_class].append(test_case._testMethodName) + + count_missing = 0 + for container_class in type_map.get_container_classes('core'): + if container_class not in tested_containers: + count_missing += 1 + if verbose > 1: + logging.info('%s missing test case; should define in %s' % (container_class, + inspect.getfile(container_class))) + + if count_missing > 0: + logging.info('%d classes missing integration tests in ui_write' % count_missing) + else: + logging.info('all classes have integration tests') + + run_test_suite("tests/integration/utils", "integration utils tests", verbose=verbose) + + +def clean_up_tests(): + # remove files generated from running example files + files_to_remove = [ + "advanced_io_example.nwb", + "basic_alternative_custom_write.nwb", + "basic_iterwrite_example.nwb", + "basic_sparse_iterwrite_*.nwb", + "basic_sparse_iterwrite_*.npy", + "basics_tutorial.nwb", + "behavioral_tutorial.nwb", + "brain_observatory.nwb", + "cache_spec_example.nwb", + "ecephys_tutorial.nwb", + "ecog.extensions.yaml", + "ecog.namespace.yaml", + "ex_test_icephys_file.nwb", + "example_timeintervals_file.nwb", + "exported_nwbfile.nwb", + "external_linkcontainer_example.nwb", + "external_linkdataset_example.nwb", + "external1_example.nwb", + "external2_example.nwb", + "icephys_example.nwb", + "icephys_pandas_testfile.nwb", + "images_tutorial.nwb", + "manifest.json", + "mylab.extensions.yaml", + "mylab.namespace.yaml", + "nwbfile.nwb", + "ophys_tutorial.nwb", + "processed_data.nwb", + "raw_data.nwb", + "scratch_analysis.nwb", + "test_cortical_surface.nwb", + "test_icephys_file.nwb", + "test_multicontainerinterface.extensions.yaml", + "test_multicontainerinterface.namespace.yaml", + "test_multicontainerinterface.nwb", + ] + for f in files_to_remove: + for name in glob.glob(f): + if os.path.exists(name): + os.remove(name) + + +def main(): + # setup and parse arguments + parser = argparse.ArgumentParser('python test.py [options]') + parser.set_defaults(verbosity=1, suites=[]) + parser.add_argument('-v', '--verbose', const=2, dest='verbosity', action='store_const', help='run in verbose mode') + parser.add_argument('-q', '--quiet', const=0, dest='verbosity', action='store_const', help='run disabling output') + parser.add_argument('-p', '--pynwb', action='append_const', const=flags['pynwb'], dest='suites', + help='run unit tests for pynwb package') + parser.add_argument('-i', '--integration', action='append_const', 
const=flags['integration'], dest='suites', + help='run integration tests') + parser.add_argument('-e', '--example', action='append_const', const=flags['example'], dest='suites', + help='run example tests') + parser.add_argument('-f', '--example-ros3', action='append_const', const=flags['example-ros3'], dest='suites', + help='run example tests with ros3 streaming') + parser.add_argument('-b', '--backwards', action='append_const', const=flags['backwards'], dest='suites', + help='run backwards compatibility tests') + parser.add_argument('-w', '--validate-examples', action='append_const', const=flags['validate-examples'], + dest='suites', help='run example tests and validation tests on example NWB files') + parser.add_argument('-r', '--ros3', action='append_const', const=flags['ros3'], dest='suites', + help='run ros3 streaming tests') + parser.add_argument('-x', '--validation-module', action='append_const', const=flags['validation-module'], + dest='suites', help='run tests on pynwb.validate') + args = parser.parse_args() + if not args.suites: + args.suites = list(flags.values()) + # remove from test suites run by default + args.suites.pop(args.suites.index(flags['example'])) + args.suites.pop(args.suites.index(flags['example-ros3'])) + args.suites.pop(args.suites.index(flags['validate-examples'])) + args.suites.pop(args.suites.index(flags['ros3'])) + args.suites.pop(args.suites.index(flags['validation-module'])) + + # set up logger + root = logging.getLogger() + root.setLevel(logging.INFO) + ch = logging.StreamHandler(sys.stdout) + ch.setLevel(logging.INFO) + formatter = logging.Formatter('======================================================================\n' + '%(asctime)s - %(levelname)s - %(message)s') + ch.setFormatter(formatter) + root.addHandler(ch) + + warnings.simplefilter('always') + + warnings.filterwarnings("ignore", category=ImportWarning, module='importlib._bootstrap', + message=("can't resolve package from __spec__ or __package__, falling back on __name__ " + "and __path__")) + + # Run unit tests for pynwb package + if flags['pynwb'] in args.suites: + run_test_suite("tests/unit", "pynwb unit tests", verbose=args.verbosity) + + # Run example tests + is_run_example_tests = False + if flags['example'] in args.suites or flags['validate-examples'] in args.suites: + run_example_tests() + is_run_example_tests = True + + # Run example tests with ros3 streaming examples + # NOTE this requires h5py to be built with ROS3 support and the dandi package to be installed + # this is most easily done by creating a conda environment using environment-ros3.yml + if flags['example-ros3'] in args.suites: + run_example_ros3_tests() + + # Run validation tests on the example NWB files generated above + if flags['validate-examples'] in args.suites: + validate_nwbs() + + # Run integration tests + if flags['integration'] in args.suites: + run_integration_tests(verbose=args.verbosity) + + # Run validation module tests, requires coverage to be installed + if flags['validation-module'] in args.suites: + run_test_suite("tests/validation", "validation tests", verbose=args.verbosity) + + # Run backwards compatibility tests + if flags['backwards'] in args.suites: + run_test_suite("tests/back_compat", "pynwb backwards compatibility tests", verbose=args.verbosity) + + # Run ros3 streaming tests + if flags['ros3'] in args.suites: + run_test_suite("tests/integration/ros3", "pynwb ros3 streaming tests", verbose=args.verbosity) + + # Delete files generated from running example tests above + if is_run_example_tests: 
+ clean_up_tests() + + final_message = 'Ran %s tests' % TOTAL + exitcode = 0 + if ERRORS > 0 or FAILURES > 0: + exitcode = 1 + _list = list() + if ERRORS > 0: + _list.append('errors=%d' % ERRORS) + if FAILURES > 0: + _list.append('failures=%d' % FAILURES) + final_message = '%s - FAILED (%s)' % (final_message, ','.join(_list)) + else: + final_message = '%s - OK' % final_message + + logging.info(final_message) + + return exitcode + + +if __name__ == "__main__": + sys.exit(main()) From ceffaca0f6ae8dc91ca5adf51b85d0732152d1c2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 13:47:48 -0800 Subject: [PATCH 39/89] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f836bf0a1..ed4c269ba 100644 --- a/tox.ini +++ b/tox.ini @@ -22,7 +22,7 @@ deps = commands = python -m pip check # Check for conflicting packages python -m pip list - pytest -v + python test.py -v # Test with python 3.12; pinned dev reqs; upgraded run reqs [testenv:py312-upgraded] From f3fa38b7296fd9087273f537ebf4a387775715eb Mon Sep 17 00:00:00 2001 From: mavaylon1 Date: Wed, 6 Mar 2024 13:49:29 -0800 Subject: [PATCH 40/89] tox --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index ed4c269ba..1d6ccc400 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py38, py39, py310, py311, py 312 +envlist = py38, py39, py310, py311, py312 requires = pip >= 22.0 [testenv] From ebb74ec435402fd1f3f00e160f639256bac9172e Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 13:57:13 -0800 Subject: [PATCH 41/89] Update __init__.py --- src/pynwb/__init__.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 5e29caede..607fe6a9e 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -391,8 +391,15 @@ def export(self, **kwargs): from hdmf.data_utils import DataChunkIterator # noqa: F401,E402 from hdmf.backends.hdf5 import H5DataIO # noqa: F401,E402 -from . 
import _version # noqa: F401,E402 -__version__ = _version.get_versions()['version'] +try: + # see https://effigies.gitlab.io/posts/python-packaging-2023/ + from ._version import __version__ +except ImportError: # pragma: no cover + # this is a relatively slower method for getting the version string + from importlib.metadata import version # noqa: E402 + + __version__ = version("pynwb") + del version from ._due import due, BibTeX # noqa: E402 due.cite( From 0584d6c154ae627a259a2ce329f8a7c0931c6b44 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 14:03:47 -0800 Subject: [PATCH 42/89] Update run_all_tests.yml --- .github/workflows/run_all_tests.yml | 52 ++++++++++++++++------------- 1 file changed, 28 insertions(+), 24 deletions(-) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index b795e0cf7..d379c1088 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -26,23 +26,26 @@ jobs: - { name: linux-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: ubuntu-latest } - { name: linux-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: ubuntu-latest } - { name: linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - - { name: linux-python3.11-optional , test-tox-env: py311-optional , build-tox-env: build-py311-optional , python-ver: "3.11", os: ubuntu-latest } - - { name: linux-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: ubuntu-latest } - - { name: linux-python3.11-prerelease , test-tox-env: py311-prerelease, build-tox-env: build-py311-prerelease, python-ver: "3.11", os: ubuntu-latest } + - { name: linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: windows-latest } - { name: windows-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: windows-latest } - { name: windows-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: windows-latest } - { name: windows-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: windows-latest } - - { name: windows-python3.11-optional , test-tox-env: py311-optional , build-tox-env: build-py311-optional , python-ver: "3.11", os: windows-latest } - - { name: windows-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: windows-latest } - - { name: windows-python3.11-prerelease, test-tox-env: py311-prerelease, build-tox-env: build-py311-prerelease, python-ver: "3.11", os: windows-latest } + - { name: windows-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-optional , test-tox-env: py312-optional , build-tox-env: 
build-py312-optional , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: windows-latest } - { name: macos-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: macos-latest } - { name: macos-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: macos-latest } - { name: macos-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: macos-latest } - { name: macos-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: macos-latest } - - { name: macos-python3.11-optional , test-tox-env: py311-optional , build-tox-env: build-py311-optional , python-ver: "3.11", os: macos-latest } - - { name: macos-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: macos-latest } - - { name: macos-python3.11-prerelease , test-tox-env: py311-prerelease, build-tox-env: build-py311-prerelease, python-ver: "3.11", os: macos-latest } + - { name: macos-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -90,14 +93,14 @@ jobs: matrix: include: - { name: linux-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } - - { name: linux-gallery-python3.11-upgraded , test-tox-env: gallery-py311-upgraded , python-ver: "3.11", os: ubuntu-latest } - - { name: linux-gallery-python3.11-prerelease , test-tox-env: gallery-py311-prerelease, python-ver: "3.11", os: ubuntu-latest } + - { name: linux-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: windows-latest } - - { name: windows-gallery-python3.11-upgraded , test-tox-env: gallery-py311-upgraded , python-ver: "3.11", os: windows-latest } - - { name: windows-gallery-python3.11-prerelease, test-tox-env: gallery-py311-prerelease, python-ver: "3.11", os: windows-latest } + - { name: windows-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: windows-latest } + - { name: windows-gallery-python3.12-prerelease, test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: windows-latest } - { name: macos-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: macos-latest } - - { name: macos-gallery-python3.11-upgraded , test-tox-env: gallery-py311-upgraded , python-ver: "3.11", os: macos-latest } - - { name: 
macos-gallery-python3.11-prerelease , test-tox-env: gallery-py311-prerelease, python-ver: "3.11", os: macos-latest } + - { name: macos-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: macos-latest } + - { name: macos-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -139,9 +142,10 @@ jobs: - { name: conda-linux-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: ubuntu-latest } - { name: conda-linux-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: ubuntu-latest } - { name: conda-linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - - { name: conda-linux-python3.11-optional , test-tox-env: py311-optional , build-tox-env: build-py311-optional , python-ver: "3.11", os: ubuntu-latest } - - { name: conda-linux-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: ubuntu-latest } - - { name: conda-linux-python3.11-prerelease, test-tox-env: py311-prerelease, build-tox-env: build-py311-prerelease, python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -196,9 +200,9 @@ jobs: fail-fast: false matrix: include: - - { name: conda-linux-python3.11-ros3 , python-ver: "3.11", os: ubuntu-latest } - - { name: conda-windows-python3.11-ros3, python-ver: "3.11", os: windows-latest } - - { name: conda-macos-python3.11-ros3 , python-ver: "3.11", os: macos-latest } + - { name: conda-linux-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-windows-python3.12-ros3, python-ver: "3.12", os: windows-latest } + - { name: conda-macos-python3.12-ros3 , python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -243,9 +247,9 @@ jobs: fail-fast: false matrix: include: - - { name: conda-linux-gallery-python3.11-ros3 , python-ver: "3.11", os: ubuntu-latest } - - { name: conda-windows-gallery-python3.11-ros3, python-ver: "3.11", os: windows-latest } - - { name: conda-macos-gallery-python3.11-ros3 , python-ver: "3.11", os: macos-latest } + - { name: conda-linux-gallery-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-windows-gallery-python3.12-ros3, python-ver: "3.12", os: windows-latest } + - { name: conda-macos-gallery-python3.12-ros3 , python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 From d7ec5f32315fbe53679d184b424df1d3f27290dd Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 14:04:39 -0800 Subject: [PATCH 43/89] Update run_tests.yml --- .github/workflows/run_tests.yml | 10 +++++----- 1 file 
changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index c61447998..d62bec2ae 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -21,9 +21,9 @@ jobs: include: - { name: linux-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } # NOTE config below with "upload-wheels: true" specifies that wheels should be uploaded as an artifact - - { name: linux-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: ubuntu-latest , upload-wheels: true } + - { name: linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest , upload-wheels: true } - { name: windows-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: windows-latest } - - { name: windows-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: windows-latest } + - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: windows-latest } - { name: macos-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: macos-latest } steps: - name: Cancel non-latest runs @@ -79,9 +79,9 @@ jobs: matrix: include: - { name: linux-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } - - { name: linux-gallery-python3.11-upgraded , test-tox-env: gallery-py311-upgraded, python-ver: "3.11", os: ubuntu-latest } + - { name: linux-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded, python-ver: "3.12", os: ubuntu-latest } - { name: windows-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: windows-latest } - - { name: windows-gallery-python3.11-upgraded, test-tox-env: gallery-py311-upgraded, python-ver: "3.11", os: windows-latest } + - { name: windows-gallery-python3.12-upgraded, test-tox-env: gallery-py312-upgraded, python-ver: "3.12", os: windows-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -120,7 +120,7 @@ jobs: matrix: include: - { name: conda-linux-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } - - { name: conda-linux-python3.11-upgraded , test-tox-env: py311-upgraded , build-tox-env: build-py311-upgraded , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.11", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 From 2b1bdbc941d27aa4466cb01b8c50a17055870e7e Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Wed, 6 Mar 2024 14:05:22 -0800 Subject: [PATCH 44/89] Update run_tests.yml --- .github/workflows/run_tests.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index d62bec2ae..d2b3d169a 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -120,7 +120,7 @@ jobs: matrix: include: - { name: conda-linux-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: 
"3.8" , os: ubuntu-latest } - - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -174,7 +174,7 @@ jobs: fail-fast: false matrix: include: - - { name: conda-linux-python3.11-ros3 , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -219,7 +219,7 @@ jobs: fail-fast: false matrix: include: - - { name: conda-linux-gallery-python3.11-ros3 , python-ver: "3.11", os: ubuntu-latest } + - { name: conda-linux-gallery-python3.12-ros3 , python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -279,7 +279,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Download wheel and source distributions from artifact uses: actions/download-artifact@v3 From d882ebb7d1daa1857e46a5eb85fe921b9e6d5751 Mon Sep 17 00:00:00 2001 From: mavaylon1 Date: Thu, 7 Mar 2024 08:38:30 -0800 Subject: [PATCH 45/89] schema 2.6 --- src/pynwb/nwb-schema | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/nwb-schema b/src/pynwb/nwb-schema index d65d42257..b4f8838cb 160000 --- a/src/pynwb/nwb-schema +++ b/src/pynwb/nwb-schema @@ -1 +1 @@ -Subproject commit d65d42257003543c569ea7ac0cd6d7aee01c88d6 +Subproject commit b4f8838cbfbb7f8a117bd7e0aad19133d26868b4 From 7ed8a90a04f27da6e83e9056bc735232604caffc Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 7 Mar 2024 09:14:30 -0800 Subject: [PATCH 46/89] Update pyproject.toml --- pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0b82e3503..6243dae28 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "License :: OSI Approved :: BSD License", "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -62,7 +63,7 @@ packages = ["src/pynwb"] addopts = "--cov --cov-report html" # generates coverage report in html format without showing anything on the terminal. 
[tool.codespell] -skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema" +skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema,./docs/_build/*,*.ipynb" ignore-words-list = "datas" [tool.coverage.run] @@ -97,7 +98,7 @@ line-length = 120 [tool.ruff.per-file-ignores] "docs/gallery/*" = ["E402", "T201"] "src/*/__init__.py" = ["F401"] -"test_gallery.py" = ["T201"] +# "test_gallery.py" = ["T201"] # Uncomment when test_gallery.py is created [tool.ruff.mccabe] max-complexity = 17 From e9e458f274544ba74534083cd9790032e7f860fe Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 7 Mar 2024 09:36:38 -0800 Subject: [PATCH 47/89] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 1d6ccc400..952a52317 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ download = True usedevelop = True setenv = PYTHONDONTWRITEBYTECODE = 1 - VIRTUALENV_PIP = 22.3.1 + VIRTUALENV_python -m pip = 22.3.1 install_command = python -m pip install -U {opts} {packages} From 094bd2696b8d171f7c85331878806014771721f1 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 7 Mar 2024 10:04:13 -0800 Subject: [PATCH 48/89] Update test.py --- test.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test.py b/test.py index 16191ae3f..08efa0f59 100644 --- a/test.py +++ b/test.py @@ -68,8 +68,10 @@ def run_test_suite(directory, description="", verbose=True): def _import_from_file(script): - import imp - return imp.load_source(os.path.basename(script), script) + # import imp was replaced due to python 3.12 dropping the module. + # this will be removed/changed when pytest is integrated. + from importlib.machinery import SourceFileLoader + return SourceFileLoader(script, os.path.basename(script)) warning_re = re.compile("Parent module '[a-zA-Z0-9]+' not found while handling absolute import") From a285664932c8ea828d995f289c2b070091f53dd6 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 7 Mar 2024 10:14:09 -0800 Subject: [PATCH 49/89] Update test.py --- test.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test.py b/test.py index 08efa0f59..978317847 100644 --- a/test.py +++ b/test.py @@ -70,8 +70,9 @@ def run_test_suite(directory, description="", verbose=True): def _import_from_file(script): # import imp was replaced due to python 3.12 dropping the module. # this will be removed/changed when pytest is integrated. 
- from importlib.machinery import SourceFileLoader - return SourceFileLoader(script, os.path.basename(script)) + # from importlib.machinery import SourceFileLoader + # return SourceFileLoader(script, os.path.basename(script)) + pass warning_re = re.compile("Parent module '[a-zA-Z0-9]+' not found while handling absolute import") From 822e290f6fd5ac2db41a0defc33e890e1b95480c Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 13:43:13 -0800 Subject: [PATCH 50/89] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 952a52317..1d6ccc400 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ download = True usedevelop = True setenv = PYTHONDONTWRITEBYTECODE = 1 - VIRTUALENV_python -m pip = 22.3.1 + VIRTUALENV_PIP = 22.3.1 install_command = python -m pip install -U {opts} {packages} From d3c6227a15088c2b9429cf73ef726a8185677ec2 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 13:44:02 -0800 Subject: [PATCH 51/89] Update test.py --- test.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test.py b/test.py index 978317847..1f3d4a91c 100644 --- a/test.py +++ b/test.py @@ -68,11 +68,11 @@ def run_test_suite(directory, description="", verbose=True): def _import_from_file(script): - # import imp was replaced due to python 3.12 dropping the module. - # this will be removed/changed when pytest is integrated. - # from importlib.machinery import SourceFileLoader - # return SourceFileLoader(script, os.path.basename(script)) - pass + modname = os.path.basename(script) + spec = importlib.util.spec_from_file_location(os.path.basename(script), script) + module = importlib.util.module_from_spec(spec) + sys.modules[modname] = module + spec.loader.exec_module(module) warning_re = re.compile("Parent module '[a-zA-Z0-9]+' not found while handling absolute import") From a5908a60b39e20c925c1af212caa189bf77b409d Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 14:05:19 -0800 Subject: [PATCH 52/89] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 1d6ccc400..f6c75b325 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ download = True usedevelop = True setenv = PYTHONDONTWRITEBYTECODE = 1 - VIRTUALENV_PIP = 22.3.1 + VIRTUALENV pip = 22.3.1 install_command = python -m pip install -U {opts} {packages} From de9b0c3ad84acd1935985215617b7e69161802f3 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 14:13:13 -0800 Subject: [PATCH 53/89] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index f6c75b325..9343999d7 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ download = True usedevelop = True setenv = PYTHONDONTWRITEBYTECODE = 1 - VIRTUALENV pip = 22.3.1 + VIRTUALENV_pip = 22.3.1 install_command = python -m pip install -U {opts} {packages} From b81a656b3fd19759cf2616321dd3d84170d8ebc1 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 14:14:29 -0800 Subject: [PATCH 54/89] Update test.py --- test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test.py b/test.py index 1f3d4a91c..231facf58 100644 --- a/test.py +++ b/test.py @@ -11,6 +11,7 @@ import sys import traceback import unittest +import importlib.util flags = { 'pynwb': 2, From c23186d87689d9f5763dbc4916f1934dbfa8ddc5 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 14:15:06 -0800 Subject: [PATCH 55/89] Update tox.ini --- tox.ini | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 9343999d7..799eb2411 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ download = True usedevelop = True setenv = PYTHONDONTWRITEBYTECODE = 1 - VIRTUALENV_pip = 22.3.1 + VIRTUALENV_PIP = 23.3.1 install_command = python -m pip install -U {opts} {packages} From bfadb988de06940003f7010fdf0cb5337ead6425 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 15:32:21 -0800 Subject: [PATCH 56/89] Update environment-ros3.yml --- environment-ros3.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/environment-ros3.yml b/environment-ros3.yml index 07838258e..5b4bc6c92 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -4,14 +4,15 @@ channels: - conda-forge - defaults dependencies: - - python==3.11 - - h5py==3.8.0 + - python==3.12 + - h5py==3.10.0 - hdmf==3.12.2 - - matplotlib==3.7.1 - - numpy==1.24.2 - - pandas==2.0.0 + - matplotlib==3.8.0 + - numpy==1.26 + - pandas==2.1.2 - python-dateutil==2.8.2 - setuptools + - pytest==7.4.3 # This is for the upcoming pytest update - dandi==0.59.0 # NOTE: dandi does not support osx-arm64 - fsspec==2023.6.0 - requests==2.28.1 From 4ee3ba5a2b4bd5d60c9da4995012b9adbd7ef725 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 15:40:38 -0800 Subject: [PATCH 57/89] Update environment-ros3.yml --- environment-ros3.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/environment-ros3.yml b/environment-ros3.yml index 5b4bc6c92..d09b890ff 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -14,9 +14,9 @@ dependencies: - setuptools - pytest==7.4.3 # This is for the upcoming pytest update - dandi==0.59.0 # NOTE: dandi does not support osx-arm64 - - fsspec==2023.6.0 - - requests==2.28.1 - - aiohttp==3.8.3 + - fsspec==2024.2.0 + - requests==2.31.0 + - aiohttp==3.9.3 - pip - pip: - remfile==0.1.9 From 97c76f9e18584fcadcfdfc84e47d06b02f05fe92 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 15:43:17 -0800 Subject: [PATCH 58/89] Update environment-ros3.yml --- environment-ros3.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment-ros3.yml b/environment-ros3.yml index d09b890ff..155d2a938 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -13,7 +13,7 @@ dependencies: - python-dateutil==2.8.2 - setuptools - pytest==7.4.3 # This is for the upcoming pytest update - - dandi==0.59.0 # NOTE: dandi does not support osx-arm64 + - dandi==0.60.0 # NOTE: dandi does not support osx-arm64 - fsspec==2024.2.0 - requests==2.31.0 - aiohttp==3.9.3 From aaf3405d544b26657173334a4972b5391132582d Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Fri, 8 Mar 2024 16:01:41 -0800 Subject: [PATCH 59/89] Update ruff.yml --- .github/workflows/ruff.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml index 170ecf921..1933fa75e 100644 --- a/.github/workflows/ruff.yml +++ b/.github/workflows/ruff.yml @@ -1,11 +1,11 @@ -# name: Ruff -# on: pull_request +name: Ruff +on: pull_request -# jobs: -# ruff: -# runs-on: ubuntu-latest -# steps: -# - name: Checkout repo -# uses: actions/checkout@v4 -# - name: Run ruff -# uses: chartboost/ruff-action@v1 +jobs: + ruff: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Run ruff + uses: chartboost/ruff-action@v1 From 8a62ae1ee9c5d0b85c1ba23c66cf1aa8d39a6c24 Mon Sep 17 00:00:00 2001 
From: Matthew Avaylon Date: Mon, 11 Mar 2024 11:45:53 -0700 Subject: [PATCH 60/89] Update pyproject.toml --- pyproject.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 6243dae28..011679474 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,12 +92,17 @@ exclude = [ "docs/source/conf.py", "src/pynwb/_due.py", "docs/source/tutorials/", + "test.py" # remove when pytest comes along ] line-length = 120 [tool.ruff.per-file-ignores] +"tests/read_dandi/*" = ["T201"] "docs/gallery/*" = ["E402", "T201"] "src/*/__init__.py" = ["F401"] +"src/pynwb/_version.py" = ["T201"] +"src/pynwb/validate.py" = ["T201"] + # "test_gallery.py" = ["T201"] # Uncomment when test_gallery.py is created [tool.ruff.mccabe] From ac154b8690bfd6f942fa338ce8cfc61f252307f8 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 11:47:14 -0700 Subject: [PATCH 61/89] Update icephys.py --- src/pynwb/icephys.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pynwb/icephys.py b/src/pynwb/icephys.py index 2de21d571..bed2d4ecd 100644 --- a/src/pynwb/icephys.py +++ b/src/pynwb/icephys.py @@ -491,15 +491,15 @@ def __init__(self, **kwargs): if required_dynamic_table_missing: if required_dynamic_table_given[2] < 0: dynamic_table_arg.append(IntracellularResponsesTable) - if not dynamic_table_arg[-1].name in categories_arg: + if dynamic_table_arg[-1].name not in categories_arg: categories_arg.insert(0, dynamic_table_arg[-1].name) if required_dynamic_table_given[1] < 0: dynamic_table_arg.append(IntracellularStimuliTable()) - if not dynamic_table_arg[-1].name in categories_arg: + if dynamic_table_arg[-1].name not in categories_arg: categories_arg.insert(0, dynamic_table_arg[-1].name) if required_dynamic_table_given[0] < 0: dynamic_table_arg.append(IntracellularElectrodesTable()) - if not dynamic_table_arg[-1].name in categories_arg: + if dynamic_table_arg[-1].name not in categories_arg: categories_arg.insert(0, dynamic_table_arg[-1].name) kwargs['category_tables'] = dynamic_table_arg kwargs['categories'] = categories_arg From bed6716c0e44e6bd62383d9283230735a323f0d6 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 11:53:07 -0700 Subject: [PATCH 62/89] Update extensions.py --- docs/gallery/general/extensions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/gallery/general/extensions.py b/docs/gallery/general/extensions.py index 4ec8f4749..63edab9d7 100644 --- a/docs/gallery/general/extensions.py +++ b/docs/gallery/general/extensions.py @@ -327,7 +327,7 @@ def __init__(self, **kwargs): @register_class("PotatoSack", name) class PotatoSack(MultiContainerInterface): __clsconf__ = { - "attr": "potatos", + "attr": "potatoes", "type": Potato, "add": "add_potato", "get": "get_potato", @@ -345,7 +345,7 @@ class PotatoSack(MultiContainerInterface): from pynwb import NWBHDF5IO, NWBFile # You can add potatoes to a potato sack in different ways -potato_sack = PotatoSack(potatos=Potato(name="potato1", age=2.3, weight=3.0)) +potato_sack = PotatoSack(potateos=Potato(name="potato1", age=2.3, weight=3.0)) potato_sack.add_potato(Potato("potato2", 3.0, 4.0)) potato_sack.create_potato("big_potato", 10.0, 20.0) From f150914f76c1b672486db263c6e4ebf90414ef91 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 11:57:04 -0700 Subject: [PATCH 63/89] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 
011679474..70a70dd7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ addopts = "--cov --cov-report html" # generates coverage report in html format w [tool.codespell] skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema,./docs/_build/*,*.ipynb" -ignore-words-list = "datas" +ignore-words-list = "optin,potatos" [tool.coverage.run] branch = true From b552ef6c13723f519f81d7771cae3d9a2ea573fc Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 11:57:43 -0700 Subject: [PATCH 64/89] Update extensions.py --- docs/gallery/general/extensions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/gallery/general/extensions.py b/docs/gallery/general/extensions.py index 63edab9d7..4ec8f4749 100644 --- a/docs/gallery/general/extensions.py +++ b/docs/gallery/general/extensions.py @@ -327,7 +327,7 @@ def __init__(self, **kwargs): @register_class("PotatoSack", name) class PotatoSack(MultiContainerInterface): __clsconf__ = { - "attr": "potatoes", + "attr": "potatos", "type": Potato, "add": "add_potato", "get": "get_potato", @@ -345,7 +345,7 @@ class PotatoSack(MultiContainerInterface): from pynwb import NWBHDF5IO, NWBFile # You can add potatoes to a potato sack in different ways -potato_sack = PotatoSack(potateos=Potato(name="potato1", age=2.3, weight=3.0)) +potato_sack = PotatoSack(potatos=Potato(name="potato1", age=2.3, weight=3.0)) potato_sack.add_potato(Potato("potato2", 3.0, 4.0)) potato_sack.create_potato("big_potato", 10.0, 20.0) From def25a19e9f69a38cae982b75a4b6c249f8e640b Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:00:03 -0700 Subject: [PATCH 65/89] Update pyproject.toml --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 70a70dd7f..3de841c77 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,6 +102,7 @@ line-length = 120 "src/*/__init__.py" = ["F401"] "src/pynwb/_version.py" = ["T201"] "src/pynwb/validate.py" = ["T201"] +"scripts" = ["T201"] # "test_gallery.py" = ["T201"] # Uncomment when test_gallery.py is created From 9650124c4f05da3d21506327e2aa28e3bdf53adc Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:00:51 -0700 Subject: [PATCH 66/89] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3de841c77..6df1a1d01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ line-length = 120 "src/*/__init__.py" = ["F401"] "src/pynwb/_version.py" = ["T201"] "src/pynwb/validate.py" = ["T201"] -"scripts" = ["T201"] +"scripts/*" = ["T201"] # "test_gallery.py" = ["T201"] # Uncomment when test_gallery.py is created From c2027789a63b2376ecd9417a31f4ad804e1fec1d Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:07:58 -0700 Subject: [PATCH 67/89] Update CONTRIBUTING.rst --- docs/CONTRIBUTING.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index 278a06ffc..877c23363 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -116,7 +116,7 @@ Projects are currently used mainly on the NeurodataWithoutBorders organization l .. 
_sec-styleguides: Style Guides ------------ +------------ Git Commit Message Styleguide ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 3a44f70261593d90d4f0f1436576fca3d99d506a Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:35:16 -0700 Subject: [PATCH 68/89] Update install_developers.rst --- docs/source/install_developers.rst | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/docs/source/install_developers.rst b/docs/source/install_developers.rst index 8d2743979..428a7e592 100644 --- a/docs/source/install_developers.rst +++ b/docs/source/install_developers.rst @@ -79,12 +79,12 @@ After you have created and activated a virtual environment, clone the PyNWB git package requirements using the `pip `_ Python package manager, and install PyNWB in editable mode. -.. code:: +.. code:: bash - $ git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git - $ cd pynwb - $ pip install -r requirements.txt -r requirements-dev.txt - $ pip install -e . + git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git + cd pynwb + pip install -r requirements.txt -r requirements-dev.txt + pip install -e . Run tests @@ -93,12 +93,13 @@ Run tests For running the tests, it is required to install the development requirements. Again, first activate your virtualenv or conda environment. -.. code:: - $ git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git - $ cd pynwb - $ pip install -r requirements.txt -r requirements-dev.txt - $ pip install -e . - $ tox +.. code:: bash + + git clone --recurse-submodules https://github.com/NeurodataWithoutBorders/pynwb.git + cd pynwb + pip install -r requirements.txt -r requirements-dev.txt + pip install -e . + tox For debugging it can be useful to keep the intermediate NWB files created by the tests. To keep these files create the environment variables From 591b4afbbc9ecacb8c5e4fc438942f605581fb4e Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:38:59 -0700 Subject: [PATCH 69/89] Update images.py --- docs/gallery/domain/images.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/docs/gallery/domain/images.py b/docs/gallery/domain/images.py index 58e9a8e8b..135e18c8c 100644 --- a/docs/gallery/domain/images.py +++ b/docs/gallery/domain/images.py @@ -13,7 +13,8 @@ * :py:class:`~pynwb.image.OpticalSeries` for series of images that were presented as stimulus * :py:class:`~pynwb.image.ImageSeries`, for series of images (movie segments); -* :py:class:`~pynwb.image.GrayscaleImage`, :py:class:`~pynwb.image.RGBImage`, :py:class:`~pynwb.image.RGBAImage`, for static images; +* :py:class:`~pynwb.image.GrayscaleImage`, :py:class:`~pynwb.image.RGBImage`, + :py:class:`~pynwb.image.RGBAImage`, for static images; The following examples will reference variables that may not be defined within the block they are used in. For clarity, we define them here: @@ -136,19 +137,23 @@ # External Files # ^^^^^^^^^^^^^^ # -# External files (e.g. video files of the behaving animal) can be added to the :py:class:`~pynwb.file.NWBFile` by creating -# an :py:class:`~pynwb.image.ImageSeries` object using the :py:attr:`~pynwb.image.ImageSeries.external_file` attribute that specifies the -# path to the external file(s) on disk. The file(s) path must be relative to the path of the NWB file. +# External files (e.g. 
video files of the behaving animal) can be added to the :py:class:`~pynwb.file.NWBFile` +# by creating an :py:class:`~pynwb.image.ImageSeries` object using the +# :py:attr:`~pynwb.image.ImageSeries.external_file` attribute that specifies +# the path to the external file(s) on disk. +# The file(s) path must be relative to the path of the NWB file. # Either ``external_file`` or ``data`` must be specified, but not both. # -# If the sampling rate is constant, use :py:attr:`~pynwb.base.TimeSeries.rate` and :py:attr:`~pynwb.base.TimeSeries.starting_time` to specify time. -# For irregularly sampled recordings, use :py:attr:`~pynwb.base.TimeSeries.timestamps` to specify time for each sample image. +# If the sampling rate is constant, use :py:attr:`~pynwb.base.TimeSeries.rate` and +# :py:attr:`~pynwb.base.TimeSeries.starting_time` to specify time. +# For irregularly sampled recordings, use :py:attr:`~pynwb.base.TimeSeries.timestamps` to specify time for each sample +# image. # # Each external image may contain one or more consecutive frames of the full :py:class:`~pynwb.image.ImageSeries`. # The :py:attr:`~pynwb.image.ImageSeries.starting_frame` attribute serves as an index to indicate which frame # each file contains. -# For example, if the ``external_file`` dataset has three paths to files and the first and the second file have 2 frames, -# and the third file has 3 frames, then this attribute will have values `[0, 2, 4]`. +# For example, if the ``external_file`` dataset has three paths to files and the first and the second file have 2 +# frames, and the third file has 3 frames, then this attribute will have values `[0, 2, 4]`. external_file = [ os.path.relpath(movie_path, nwbfile_path) for movie_path in moviefiles_path From 0fd509537790158e4f74ae422a80956ad248830a Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:45:56 -0700 Subject: [PATCH 70/89] Update extensions.py --- docs/gallery/general/extensions.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/gallery/general/extensions.py b/docs/gallery/general/extensions.py index 4ec8f4749..ddf9159c7 100644 --- a/docs/gallery/general/extensions.py +++ b/docs/gallery/general/extensions.py @@ -12,12 +12,6 @@ For a more in-depth, step-by-step guide on how to create, document, and publish NWB extensions, we highly recommend visiting the :nwb_overview:`extension tutorial ` on the :nwb_overview:`nwb overview <>` website. - -.. seealso:: - - For more information on available tools for creating extensions, see - :nwb_overview:`here `. - """ #################### From 8d50edd554201dd9ccfef81e9541d48efe139fb1 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 12:58:28 -0700 Subject: [PATCH 71/89] Update pyproject.toml --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6df1a1d01..fc5be8136 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,9 +58,9 @@ version-file = "src/pynwb/_version.py" [tool.hatch.build.targets.wheel] packages = ["src/pynwb"] -[tool.pytest.ini_options] -# Addopts creates a shortcut for pytest. For example below, running `pytest` will actually run `pytest --cov --cov-report html`. -addopts = "--cov --cov-report html" # generates coverage report in html format without showing anything on the terminal. +# [tool.pytest.ini_options] # TODO: uncomment when pytest is integrated +# # Addopts creates a shortcut for pytest. For example below, running `pytest` will actually run `pytest --cov --cov-report html`. 
+# addopts = "--cov --cov-report html" # generates coverage report in html format without showing anything on the terminal. [tool.codespell] skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,nwb-schema,./docs/_build/*,*.ipynb" From cf5708c1bdb827b35389c2d935e28c82f4e42567 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 13:05:50 -0700 Subject: [PATCH 72/89] Update run_coverage.yml --- .github/workflows/run_coverage.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml index 35094d4e0..2e33bce3d 100644 --- a/.github/workflows/run_coverage.yml +++ b/.github/workflows/run_coverage.yml @@ -68,6 +68,8 @@ jobs: flags: unit files: coverage.xml fail_ci_if_error: true + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - name: Run integration tests and generate coverage report run: | From 273abb75d7045e06006eaf4fbfe278d2530eb72d Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Mon, 11 Mar 2024 13:25:06 -0700 Subject: [PATCH 73/89] Delete src/pynwb/_version.py --- src/pynwb/_version.py | 658 ------------------------------------------ 1 file changed, 658 deletions(-) delete mode 100644 src/pynwb/_version.py diff --git a/src/pynwb/_version.py b/src/pynwb/_version.py deleted file mode 100644 index bf16355e1..000000000 --- a/src/pynwb/_version.py +++ /dev/null @@ -1,658 +0,0 @@ - -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.26 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440-pre" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "None" - cfg.versionfile_source = "src/pynwb/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. 
When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. 
- branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} From 50513c3afb05bd035c3d3800656d79cca94b349a Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 08:59:23 -0700 Subject: [PATCH 74/89] Update CHANGELOG.md --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d3f762419..ce33d0452 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # PyNWB Changelog +## PyNWB 2.6.1 (Upcoming) + +### Enhancements and minor changes +- Added support for python 3.12 and upgraded dependency versions. This also includes infrastructure updates for developers. @mavaylon1 [#1853](https://github.com/NeurodataWithoutBorders/pynwb/pull/1853) + ## PyNWB 2.6.0 (February 21, 2024) ### Enhancements and minor changes From 4531702233441ce9f6aa6d8c8364ef300e498fba Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 09:43:28 -0700 Subject: [PATCH 75/89] Update pyproject.toml --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index fc5be8136..bc49298de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,6 +72,7 @@ source = ["src/"] omit = [ "src/pynwb/_due.py", "src/pynwb/testing/*", + "src/pynwb/legacy/*" ] [tool.coverage.report] From 52ba0d02a4944273d1cace55baaa17e21c8086fd Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:22:08 -0700 Subject: [PATCH 76/89] Update deploy_release.yml --- .github/workflows/deploy_release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy_release.yml b/.github/workflows/deploy_release.yml index 63475d1ce..d62a03734 100644 --- a/.github/workflows/deploy_release.yml +++ b/.github/workflows/deploy_release.yml @@ -18,7 +18,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Install build dependencies run: | From d6e5ba3789f10d84c7000a855a85e0b0a9b5d693 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:22:28 -0700 Subject: [PATCH 77/89] Update check_sphinx_links.yml --- .github/workflows/check_sphinx_links.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check_sphinx_links.yml b/.github/workflows/check_sphinx_links.yml index 573f2e15a..49da87755 100644 --- a/.github/workflows/check_sphinx_links.yml +++ b/.github/workflows/check_sphinx_links.yml @@ -23,7 +23,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Install Sphinx dependencies and package run: | From 0eb1d9bcffe3137809c9f4d96d6a2f31b38d4b3b Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:23:18 -0700 Subject: [PATCH 78/89] Update run_dandi_read_tests.yml --- .github/workflows/run_dandi_read_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run_dandi_read_tests.yml 
b/.github/workflows/run_dandi_read_tests.yml index 0d9e165d8..df9ef4613 100644 --- a/.github/workflows/run_dandi_read_tests.yml +++ b/.github/workflows/run_dandi_read_tests.yml @@ -25,7 +25,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Install run dependencies run: | From 49b51ba9e69992eb57a3866aecc2b2f50af589aa Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:23:33 -0700 Subject: [PATCH 79/89] Update run_inspector_tests.yml --- .github/workflows/run_inspector_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run_inspector_tests.yml b/.github/workflows/run_inspector_tests.yml index 9f57f9798..cb8d57458 100644 --- a/.github/workflows/run_inspector_tests.yml +++ b/.github/workflows/run_inspector_tests.yml @@ -23,7 +23,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Update pip run: python -m pip install --upgrade pip From 1c851f0b410707fd14f339f71ea2883ccd458698 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:24:13 -0700 Subject: [PATCH 80/89] Update deploy_release.yml --- .github/workflows/deploy_release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy_release.yml b/.github/workflows/deploy_release.yml index d62a03734..2f6cb8619 100644 --- a/.github/workflows/deploy_release.yml +++ b/.github/workflows/deploy_release.yml @@ -28,11 +28,11 @@ jobs: - name: Run tox tests run: | - tox -e py311-upgraded + tox -e py312-upgraded - name: Build wheel and source distribution run: | - tox -e build-py311-upgraded + tox -e build-py312-upgraded ls -1 dist - name: Test installation from a wheel From bf0aa765f58bf57b67f0160b306423d7424c152e Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:50:39 -0700 Subject: [PATCH 81/89] Update run_all_tests.yml --- .github/workflows/run_all_tests.yml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index d379c1088..4567135bc 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -27,25 +27,19 @@ jobs: - { name: linux-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: ubuntu-latest } - { name: linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - { name: linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } - - { name: linux-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: ubuntu-latest } - { name: linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } - - { name: linux-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: windows-latest } - { name: windows-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: windows-latest } - { name: windows-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: windows-latest } - { name: windows-python3.11 , test-tox-env: py311 , 
build-tox-env: build-py311 , python-ver: "3.11", os: windows-latest } - { name: windows-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: windows-latest } - - { name: windows-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: windows-latest } - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: windows-latest } - - { name: windows-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: windows-latest } - { name: macos-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: macos-latest } - { name: macos-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: macos-latest } - { name: macos-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: macos-latest } - { name: macos-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: macos-latest } - { name: macos-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: macos-latest } - - { name: macos-python3.12-optional , test-tox-env: py312-optional , build-tox-env: build-py312-optional , python-ver: "3.12", os: macos-latest } - { name: macos-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: macos-latest } - - { name: macos-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -94,13 +88,10 @@ jobs: include: - { name: linux-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } - { name: linux-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } - - { name: linux-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: windows-latest } - { name: windows-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: windows-latest } - - { name: windows-gallery-python3.12-prerelease, test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: windows-latest } - { name: macos-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: macos-latest } - { name: macos-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: macos-latest } - - { name: macos-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -143,9 +134,7 @@ jobs: - { name: conda-linux-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: ubuntu-latest } - { name: conda-linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - { name: conda-linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } - - { name: conda-linux-python3.12-optional , test-tox-env: py312-optional , build-tox-env: 
build-py312-optional , python-ver: "3.12", os: ubuntu-latest } - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } - - { name: conda-linux-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 From 059a1254bce77e4ce70fed0edacfceb43fab7f20 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:51:19 -0700 Subject: [PATCH 82/89] Update tox.ini --- tox.ini | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tox.ini b/tox.ini index 799eb2411..f7e974745 100644 --- a/tox.ini +++ b/tox.ini @@ -67,12 +67,6 @@ commands = {[testenv:build]commands} basepython = python3.12 commands = {[testenv:build]commands} -[testenv:build-py312-optional] -basepython = python3.12 -deps = - -rrequirements-dev.txt -commands = {[testenv:build]commands} - [testenv:build-py312-upgraded] basepython = python3.12 install_command = From 14b778f485573554d971b78122796c685f397c2c Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 14:55:46 -0700 Subject: [PATCH 83/89] Update tox.ini --- tox.ini | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index f7e974745..798da6fe6 100644 --- a/tox.ini +++ b/tox.ini @@ -33,6 +33,15 @@ deps = -rrequirements-dev.txt commands = {[testenv]commands} +# Test with python 3.12; pinned dev reqs; upgraded, pre-release run reqs +[testenv:py312-prerelease] +basepython = python3.12 +install_command = + python -m pip install -U --pre {opts} {packages} +deps = + -rrequirements-dev.txt +commands = {[testenv]commands} + # Test with python 3.8; pinned dev reqs; minimum run reqs [testenv:py38-minimum] basepython = python3.8 @@ -75,6 +84,14 @@ deps = -rrequirements-dev.txt commands = {[testenv:build]commands} +[testenv:build-py312-prerelease] +basepython = python3.12 +install_command = + python -m pip install -U --pre {opts} {packages} +deps = + -rrequirements-dev.txt +commands = {[testenv:build]commands} + [testenv:build-py38-minimum] basepython = python3.8 deps = @@ -88,7 +105,7 @@ deps = null commands = python -c "import pynwb" # Envs that will execute gallery tests that do not require ROS3 -# Test with pinned dev, doc, run, and optional reqs +# Test with pinned dev, doc, and run reqs [testenv:gallery] install_command = python -m pip install -U {opts} {packages} @@ -121,7 +138,7 @@ basepython = python3.11 deps = {[testenv:gallery]deps} commands = {[testenv:gallery]commands} -# Test with python 3.12; pinned dev, doc, and optional reqs; upgraded run reqs +# Test with python 3.12; pinned dev, and doc reqs; upgraded run reqs [testenv:gallery-py312-upgraded] basepython = python3.12 deps = From decb8b286c9ffa8a99a9ece0afa6805ded55c515 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 15:02:01 -0700 Subject: [PATCH 84/89] Update run_all_tests.yml --- .github/workflows/run_all_tests.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/run_all_tests.yml b/.github/workflows/run_all_tests.yml index 4567135bc..1935c7392 100644 --- a/.github/workflows/run_all_tests.yml +++ b/.github/workflows/run_all_tests.yml @@ -28,18 +28,21 @@ jobs: - { name: linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - { name: linux-python3.12 , test-tox-env: py312 , build-tox-env: 
build-py312 , python-ver: "3.12", os: ubuntu-latest } - { name: linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: windows-latest } - { name: windows-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: windows-latest } - { name: windows-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: windows-latest } - { name: windows-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: windows-latest } - { name: windows-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: windows-latest } - { name: windows-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: windows-latest } + - { name: windows-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.11", os: windows-latest } - { name: macos-python3.8-minimum , test-tox-env: py38-minimum , build-tox-env: build-py38-minimum , python-ver: "3.8" , os: macos-latest } - { name: macos-python3.9 , test-tox-env: py39 , build-tox-env: build-py39 , python-ver: "3.9" , os: macos-latest } - { name: macos-python3.10 , test-tox-env: py310 , build-tox-env: build-py310 , python-ver: "3.10", os: macos-latest } - { name: macos-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: macos-latest } - { name: macos-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: macos-latest } - { name: macos-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: macos-latest } + - { name: macos-python3.12-prerelease , test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -88,10 +91,13 @@ jobs: include: - { name: linux-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: ubuntu-latest } - { name: linux-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: linux-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } - { name: windows-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: windows-latest } - { name: windows-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: windows-latest } + - { name: windows-gallery-python3.12-prerelease, test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: windows-latest } - { name: macos-gallery-python3.8-minimum , test-tox-env: gallery-py38-minimum , python-ver: "3.8" , os: macos-latest } - { name: macos-gallery-python3.12-upgraded , test-tox-env: gallery-py312-upgraded , python-ver: "3.12", os: macos-latest } + - { name: macos-gallery-python3.12-prerelease , test-tox-env: gallery-py312-prerelease, python-ver: "3.12", os: macos-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 @@ -135,6 +141,7 @@ 
jobs: - { name: conda-linux-python3.11 , test-tox-env: py311 , build-tox-env: build-py311 , python-ver: "3.11", os: ubuntu-latest } - { name: conda-linux-python3.12 , test-tox-env: py312 , build-tox-env: build-py312 , python-ver: "3.12", os: ubuntu-latest } - { name: conda-linux-python3.12-upgraded , test-tox-env: py312-upgraded , build-tox-env: build-py312-upgraded , python-ver: "3.12", os: ubuntu-latest } + - { name: conda-linux-python3.12-prerelease, test-tox-env: py312-prerelease, build-tox-env: build-py312-prerelease, python-ver: "3.12", os: ubuntu-latest } steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 From f0ee13dea1fccf887350eb09f7bc9318f76bc716 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 15:25:55 -0700 Subject: [PATCH 85/89] Update run_coverage.yml --- .github/workflows/run_coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml index 2e33bce3d..400de00f3 100644 --- a/.github/workflows/run_coverage.yml +++ b/.github/workflows/run_coverage.yml @@ -24,7 +24,7 @@ jobs: os: [ubuntu-latest, macos-latest, windows-latest] env: OS: ${{ matrix.os }} - PYTHON: '3.11' + PYTHON: '3.12' steps: - name: Cancel non-latest runs uses: styfle/cancel-workflow-action@0.11.0 From 64f5d8d3964e7d0d4797b76916f5d72940b09aaa Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 15:27:23 -0700 Subject: [PATCH 86/89] Update CONTRIBUTING.rst --- docs/CONTRIBUTING.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index 877c23363..cbfd3fba6 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -145,8 +145,8 @@ Format Specification Styleguide Python Code Styleguide ^^^^^^^^^^^^^^^^^^^^^^ -Before you create a Pull Request, make sure you are following the HDMF style guide. -To check whether your code conforms to the HDMF style guide, simply run the ruff_ tool in the project's root +Before you create a Pull Request, make sure you are following the PyNWB style guide. +To check whether your code conforms to the PyNWB style guide, simply run the ruff_ tool in the project's root directory. ``ruff`` will also sort imports automatically and check against additional code style rules. We also use ``ruff`` to sort python imports automatically and double-check that the codebase From c5f487b069a8ab6c21c4df63c274c2ca727decab Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Tue, 12 Mar 2024 17:29:34 -0700 Subject: [PATCH 87/89] Update CONTRIBUTING.rst --- docs/CONTRIBUTING.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index cbfd3fba6..d51aa12e9 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -118,7 +118,7 @@ Projects are currently used mainly on the NeurodataWithoutBorders organization l Style Guides ------------ -Git Commit Message Styleguide +Git Commit Message Style Guide ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * Use the present tense ("Add feature" not "Added feature") @@ -127,7 +127,7 @@ Git Commit Message Styleguide * If a commit fixes an issues, then include "Fix #X" where X is the number of the issue. * Reference relevant issues and pull requests liberally after the first line. -Documentation Styleguide +Documentation Style Guide ^^^^^^^^^^^^^^^^^^^^^^^^ All documentations is written in reStructuredText (RST) using Sphinx. 
@@ -137,12 +137,12 @@ Did you fix whitespace, format code, or make a purely cosmetic patch in source c Source code changes that are purely cosmetic in nature and do not add anything substantial to the stability, functionality, or testability will generally not be accepted unless they have been approved beforehand. One of the main reasons is that there are a lot of hidden costs in addition to writing the code itself, and with the limited resources of the project, we need to optimize developer time. E.g,. someone needs to test and review PRs, backporting of bug fixes gets harder, it creates noise and pollutes the git repo and many other cost factors. -Format Specification Styleguide +Format Specification Style Guide ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ **Coming soon** -Python Code Styleguide +Python Code Style Guide ^^^^^^^^^^^^^^^^^^^^^^ Before you create a Pull Request, make sure you are following the PyNWB style guide. From a1cf1b49006ee180bb8863c56e59b460780fc177 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 14 Mar 2024 09:50:42 -0700 Subject: [PATCH 88/89] Update CONTRIBUTING.rst --- docs/CONTRIBUTING.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/CONTRIBUTING.rst b/docs/CONTRIBUTING.rst index d51aa12e9..2ee5d48cb 100644 --- a/docs/CONTRIBUTING.rst +++ b/docs/CONTRIBUTING.rst @@ -119,7 +119,7 @@ Style Guides ------------ Git Commit Message Style Guide -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * Use the present tense ("Add feature" not "Added feature") * The first line should be short and descriptive. @@ -128,7 +128,7 @@ Git Commit Message Style Guide * Reference relevant issues and pull requests liberally after the first line. Documentation Style Guide -^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^ All documentations is written in reStructuredText (RST) using Sphinx. @@ -138,12 +138,12 @@ Did you fix whitespace, format code, or make a purely cosmetic patch in source c Source code changes that are purely cosmetic in nature and do not add anything substantial to the stability, functionality, or testability will generally not be accepted unless they have been approved beforehand. One of the main reasons is that there are a lot of hidden costs in addition to writing the code itself, and with the limited resources of the project, we need to optimize developer time. E.g,. someone needs to test and review PRs, backporting of bug fixes gets harder, it creates noise and pollutes the git repo and many other cost factors. Format Specification Style Guide -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ **Coming soon** Python Code Style Guide -^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^ Before you create a Pull Request, make sure you are following the PyNWB style guide. To check whether your code conforms to the PyNWB style guide, simply run the ruff_ tool in the project's root From 9cd8b2a1629892cb7912cfda2893a3e51ec032c6 Mon Sep 17 00:00:00 2001 From: Matthew Avaylon Date: Thu, 14 Mar 2024 10:07:41 -0700 Subject: [PATCH 89/89] Update pyproject.toml --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bc49298de..1bae035af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,6 @@ exclude = [ "src/nwb-schema", "docs/source/conf.py", "src/pynwb/_due.py", - "docs/source/tutorials/", "test.py" # remove when pytest comes along ] line-length = 120