diff --git a/.github/workflows/docker-images.yml b/.github/workflows/docker-images.yml index e17b0c951..767716272 100644 --- a/.github/workflows/docker-images.yml +++ b/.github/workflows/docker-images.yml @@ -48,12 +48,12 @@ jobs: - tensorflow2-gpu steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/remove-extra-software - name: set docker metadata id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ${{ env.PROJECT_PREFIX }}/${{ matrix.dioptra-app }} @@ -77,13 +77,13 @@ jobs: org.opencontainers.image.authors=NCCoE Artificial Intelligence Team , James Glasbrenner , Cory Miniter , Howard Huang , Julian Sexton , Paul Rowe - name: set up docker qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: set up docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: build and push - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . 
target: final @@ -120,10 +120,10 @@ jobs: - mlflow-tracking steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python 3.9 - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: "3.9" @@ -137,7 +137,7 @@ jobs: run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache dependencies - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/tox.ini') }} @@ -154,10 +154,10 @@ jobs: path: /tmp - name: set up docker qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: set up docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: load the container image run: | @@ -180,10 +180,10 @@ jobs: - integration-tf-mnist-classifier steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python 3.9 - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: "3.9" @@ -197,7 +197,7 @@ jobs: run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache dependencies - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/tox.ini') }} @@ -208,10 +208,10 @@ jobs: run: python3 -m pip install tox - name: set up docker qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: set up docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: download the nginx testing image uses: actions/download-artifact@v3 @@ -259,7 +259,7 @@ jobs: - name: cache the mnist dataset download if: ${{ matrix.tox-env != 'containers' }} - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: /tmp/dioptra-cache/mnist key: ${{ runner.os }}-dioptra-cache-mnist diff --git a/.github/workflows/pip-compile.yml b/.github/workflows/pip-compile.yml index 
ea4c4d49e..4cc381d65 100644 --- a/.github/workflows/pip-compile.yml +++ b/.github/workflows/pip-compile.yml @@ -19,6 +19,9 @@ name: pip-compile runs on: schedule: - cron: "10 1 * * *" # at 1:10am every day + push: + branches: + - "**" jobs: pip-compile: @@ -34,10 +37,10 @@ jobs: - "requirements-dev-tensorflow" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python ${{ matrix.python-version }} - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: ${{ matrix.python-version }} @@ -58,7 +61,7 @@ jobs: - name: cache dependencies if: ${{ matrix.os != 'windows-latest' }} - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini', 'requirements-dev*.in') }} @@ -67,7 +70,7 @@ jobs: - name: cache dependencies (Windows) if: ${{ matrix.os == 'windows-latest' }} - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache-win.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini', 'requirements-dev*.in') }} @@ -129,10 +132,10 @@ jobs: - "tensorflow2-gpu-requirements" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python ${{ matrix.python-version }} - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: ${{ matrix.python-version }} @@ -146,7 +149,7 @@ jobs: run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache dependencies - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini', '**/requirements-dev*.in', '**/docker/pip-tools/*-requirements.in') }} @@ -177,16 +180,16 @@ jobs: - "py39-linux-aarch64-mlflow-tracking-requirements" - "py39-linux-aarch64-restapi-requirements" - "py39-linux-aarch64-pytorch-cpu-requirements" - # - 
"py39-linux-aarch64-tensorflow2-cpu-requirements" + - "py39-linux-aarch64-tensorflow2-cpu-requirements" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: set up docker qemu - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: set up docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: setup dockerfile and output folder run: | @@ -200,7 +203,7 @@ jobs: sudo chmod 0777 /image - name: build and save to output folder - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . platforms: linux/arm64 diff --git a/.github/workflows/sphinx-docs.yml b/.github/workflows/sphinx-docs.yml index b6bea176b..bfd4c7449 100644 --- a/.github/workflows/sphinx-docs.yml +++ b/.github/workflows/sphinx-docs.yml @@ -27,10 +27,10 @@ jobs: docs: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python 3.9 - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: "3.9" @@ -44,7 +44,7 @@ jobs: run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache dependencies - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini') }} diff --git a/.github/workflows/tox-tests.yml b/.github/workflows/tox-tests.yml index 0e652942b..0e73982b0 100644 --- a/.github/workflows/tox-tests.yml +++ b/.github/workflows/tox-tests.yml @@ -37,10 +37,10 @@ jobs: - "mypy" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: setup python ${{ matrix.python-version }} - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: ${{ matrix.python-version }} @@ -54,7 +54,7 @@ jobs: run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT - name: cache dependencies - uses: actions/cache@v3.3.1 + uses: actions/cache@v3.3.2 with: path: ${{ 
steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini') }} @@ -68,9 +68,10 @@ jobs: run: python3 -m tox run -e ${{ matrix.tox-testenv }} unit-tests: - runs-on: ubuntu-20.04 + runs-on: ${{ matrix.os }} strategy: matrix: + os: ["ubuntu-20.04", "windows-latest"] python-version: ["3.9", "3.10"] tox-testenv: - "clean,py39-pytest-cov,report" @@ -88,13 +89,14 @@ jobs: tox-testenv: "py39-cookiecutter" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: install English words dictionary + if: ${{ matrix.os == 'ubuntu-20.04' }} run: sudo apt install -y wamerican - name: setup python ${{ matrix.python-version }} - uses: actions/setup-python@v4.7.0 + uses: actions/setup-python@v4.7.1 with: python-version: ${{ matrix.python-version }} @@ -104,19 +106,35 @@ jobs: python3 -m pip install --upgrade pip - name: get pip cache dir + if: ${{ matrix.os != 'windows-latest' }} id: pip-cache run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + - name: get pip cache dir (Windows) + if: ${{ matrix.os == 'windows-latest' }} + id: pip-cache-win + run: echo "dir=$(pip cache dir)" >> $env:GITHUB_OUTPUT + - name: cache dependencies - uses: actions/cache@v3.3.1 + if: ${{ matrix.os != 'windows-latest' }} + uses: actions/cache@v3.3.2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini') }} restore-keys: | ${{ runner.os }}-pip- + - name: cache dependencies (Windows) + if: ${{ matrix.os == 'windows-latest' }} + uses: actions/cache@v3.3.2 + with: + path: ${{ steps.pip-cache-win.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml', '**/tox.ini') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: install dependencies - run: python3 -m pip install tox + run: python3 -m pip install --upgrade tox - name: run tox run: python3 -m tox run -e ${{ matrix.tox-testenv }} diff --git a/Makefile b/Makefile index 0d2ad8394..ac6ff7df1 100644 --- 
a/Makefile +++ b/Makefile @@ -42,21 +42,21 @@ PYTHON_VERSION_MINOR := $(word 2,$(subst ., ,$(PYTHON_VERSION))) ARCH := $(strip $(shell /usr/bin/env $(PY) -c 'import platform; print(platform.machine().lower())')) ifeq ($(ARCH),x86_64) -DETECTED_ARCH := x86_64 +DETECTED_ARCH := amd64 else ifeq ($(ARCH),amd64) -DETECTED_ARCH := x86_64 +DETECTED_ARCH := amd64 else ifeq ($(ARCH),aarch64) -DETECTED_ARCH := aarch64 +DETECTED_ARCH := arm64 else ifeq ($(ARCH),arm64) -DETECTED_ARCH := aarch64 +DETECTED_ARCH := arm64 endif VENV_EXTRA ?= ifeq ($(DETECTED_OS),Darwin) CORES = $(shell sysctl -n hw.physicalcpu_max) -PIPTOOLS_SYNC := CFLAGS="-stdlib=libc++" pip-sync -VENV_REQUIREMENTS = requirements/macos-$(if $(filter aarch64, $(DETECTED_ARCH)),arm64,x86_64)-py$(PYTHON_VERSION_MAJOR).$(PYTHON_VERSION_MINOR)-requirements-dev$(VENV_EXTRA).txt +PIPTOOLS_SYNC := CFLAGS="-stdlib=libc++ -std=c99" pip-sync +VENV_REQUIREMENTS = requirements/macos-$(DETECTED_ARCH)-py$(PYTHON_VERSION_MAJOR).$(PYTHON_VERSION_MINOR)-requirements-dev$(VENV_EXTRA).txt else ifeq ($(DETECTED_OS),Linux) CORES = $(shell lscpu -p | egrep -v '^\#' | sort -u -t, -k 2,4 | wc -l) PIPTOOLS_SYNC := pip-sync @@ -181,7 +181,7 @@ DOCS_WEB_COMPILE_FILES := $(wildcard $(DOCS_SCSS_DIR)/*.scss) PIP := ifeq ($(DETECTED_OS),Darwin) -PIP += CFLAGS="-stdlib=libc++" $(PY) -m pip +PIP += CFLAGS="-stdlib=libc++ -std=c99" $(PY) -m pip else PIP += $(PY) -m pip endif diff --git a/README.md b/README.md index 60aed754a..09fa67a13 100644 --- a/README.md +++ b/README.md @@ -32,21 +32,21 @@ Ensure that you have Python 3.9 installed and that it is available in your PATH, | Filename | OS | Architecture | Tensorflow | PyTorch | | :--- | :---: | :---: | :--- | :--- | -| linux-x86_64-py3.9-requirements-dev.txt | Linux | x86-64 | ❌ | ❌ | -| linux-x86_64-py3.9-requirements-dev-tensorflow.txt | Linux | x86-64 | ✅ | ❌ | -| linux-x86_64-py3.9-requirements-dev-pytorch.txt | Linux | x86-64 | ❌ | ✅ | -| linux-aarch64-py3.9-requirements-dev.txt | Linux | 
arm64 | ❌ | ❌ | -| linux-aarch64-py3.9-requirements-dev-tensorflow.txt | Linux | arm64 | ❌ | ❌ | -| linux-aarch64-py3.9-requirements-dev-pytorch.txt | Linux | arm64 | ❌ | ✅ | -| macos-x86_64-py3.9-requirements-dev.txt | MacOS | x86-64 | ❌ | ❌ | -| macos-x86_64-py3.9-requirements-dev-tensorflow.txt | MacOS | x86-64 | ✅ | ❌ | -| macos-x86_64-py3.9-requirements-dev-pytorch.txt | MacOS | x86-64 | ❌ | ✅ | +| linux-amd64-py3.9-requirements-dev.txt | Linux | x86-64 | ❌ | ❌ | +| linux-amd64-py3.9-requirements-dev-tensorflow.txt | Linux | x86-64 | ✅ | ❌ | +| linux-amd64-py3.9-requirements-dev-pytorch.txt | Linux | x86-64 | ❌ | ✅ | +| linux-arm64-py3.9-requirements-dev.txt | Linux | arm64 | ❌ | ❌ | +| linux-arm64-py3.9-requirements-dev-tensorflow.txt | Linux | arm64 | ❌ | ❌ | +| linux-arm64-py3.9-requirements-dev-pytorch.txt | Linux | arm64 | ❌ | ✅ | +| macos-amd64-py3.9-requirements-dev.txt | MacOS | x86-64 | ❌ | ❌ | +| macos-amd64-py3.9-requirements-dev-tensorflow.txt | MacOS | x86-64 | ✅ | ❌ | +| macos-amd64-py3.9-requirements-dev-pytorch.txt | MacOS | x86-64 | ❌ | ✅ | | macos-arm64-py3.9-requirements-dev.txt | MacOS | arm64 | ❌ | ❌ | | macos-arm64-py3.9-requirements-dev-tensorflow.txt | MacOS | arm64 | ✅ | ❌ | | macos-arm64-py3.9-requirements-dev-pytorch.txt | MacOS | arm64 | ❌ | ✅ | -| win-x86_64-py3.9-requirements-dev.txt | Windows | x86-64 | ❌ | ❌ | -| win-x86_64-py3.9-requirements-dev-tensorflow.txt | Windows | x86-64 | ✅ | ❌ | -| win-x86_64-py3.9-requirements-dev-pytorch.txt | Windows | x86-64 | ❌ | ✅ | +| win-amd64-py3.9-requirements-dev.txt | Windows | x86-64 | ❌ | ❌ | +| win-amd64-py3.9-requirements-dev-tensorflow.txt | Windows | x86-64 | ✅ | ❌ | +| win-amd64-py3.9-requirements-dev-pytorch.txt | Windows | x86-64 | ❌ | ✅ | Next, use the `venv` module to create a new virtual environment: @@ -77,15 +77,15 @@ Finally, use `pip-sync` to install the dependencies in your chosen requirements On MacOS/Linux: ```sh -# Replace "linux-x86_64-py3.9-requirements-dev.txt" with 
your chosen file -pip-sync requirements/linux-x86_64-py3.9-requirements-dev.txt +# Replace "linux-amd64-py3.9-requirements-dev.txt" with your chosen file +pip-sync requirements/linux-amd64-py3.9-requirements-dev.txt ``` On Windows: ```powershell -# Replace "win-x86_64-py3.9-requirements-dev.txt" with your chosen file -pip-sync requirements\win-x86_64-py3.9-requirements-dev.txt +# Replace "win-amd64-py3.9-requirements-dev.txt" with your chosen file +pip-sync requirements\win-amd64-py3.9-requirements-dev.txt ``` If the requirements file you used is updated, or if you want to switch to another requirements file (you need access to the Tensorflow library, for example), just run `pip-sync` again using the appropriate filename. diff --git a/container-vars.mk b/container-vars.mk index 4b2890b04..9a503bb5c 100644 --- a/container-vars.mk +++ b/container-vars.mk @@ -24,7 +24,7 @@ CONTAINER_MLFLOW_TRACKING_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ - docker/requirements/linux-x86_64-py3.9-mlflow-tracking-requirements.txt\ + docker/requirements/linux-$(DETECTED_ARCH)-py3.9-mlflow-tracking-requirements.txt\ docker/shellscripts/entrypoint-mlflow-tracking.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ @@ -47,7 +47,7 @@ CONTAINER_NGINX_INCLUDE_FILES =\ CONTAINER_PYTORCH_CPU_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ - docker/requirements/linux-x86_64-py3.9-pytorch-cpu-requirements.txt\ + docker/requirements/linux-$(DETECTED_ARCH)-py3.9-pytorch-cpu-requirements.txt\ docker/shellscripts/entrypoint-worker.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ @@ -65,7 +65,7 @@ CONTAINER_PYTORCH_CPU_INCLUDE_FILES =\ CONTAINER_PYTORCH_GPU_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ - docker/requirements/linux-x86_64-py3.9-pytorch-gpu-requirements.txt\ + docker/requirements/linux-amd64-py3.9-pytorch-gpu-requirements.txt\ 
docker/shellscripts/entrypoint-worker.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ @@ -84,7 +84,7 @@ CONTAINER_RESTAPI_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ docker/configs/gunicorn.restapi.conf.py\ - docker/requirements/linux-x86_64-py3.9-restapi-requirements.txt\ + docker/requirements/linux-$(DETECTED_ARCH)-py3.9-restapi-requirements.txt\ docker/shellscripts/entrypoint-restapi.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ @@ -103,7 +103,7 @@ CONTAINER_RESTAPI_INCLUDE_FILES =\ CONTAINER_TENSORFLOW2_CPU_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ - docker/requirements/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt\ + docker/requirements/linux-$(DETECTED_ARCH)-py3.9-tensorflow2-cpu-requirements.txt\ docker/shellscripts/entrypoint-worker.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ @@ -121,7 +121,7 @@ CONTAINER_TENSORFLOW2_CPU_INCLUDE_FILES =\ CONTAINER_TENSORFLOW2_GPU_INCLUDE_FILES =\ docker/configs/aws-config\ docker/configs/build.pip.conf\ - docker/requirements/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt\ + docker/requirements/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt\ docker/shellscripts/entrypoint-worker.m4\ docker/shellscripts/fix-permissions.m4\ docker/shellscripts/parse-uri.m4\ diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/cookiecutter.json b/cookiecutter-templates/cookiecutter-dioptra-deployment/cookiecutter.json index 288ed9838..bdaa1bb1e 100644 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/cookiecutter.json +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/cookiecutter.json @@ -83,7 +83,7 @@ "mc": { "image": "mc", "namespace": "minio", - "tag": "RELEASE.2023-01-28T20-29-38Z", + "tag": "latest", "registry": "" }, "minio": { diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/hooks/post_gen_project.py 
b/cookiecutter-templates/cookiecutter-dioptra-deployment/hooks/post_gen_project.py index 54542f299..9fd9c3e96 100644 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/hooks/post_gen_project.py +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/hooks/post_gen_project.py @@ -5,6 +5,7 @@ import os import random import shutil +import string import unicodedata from pathlib import Path @@ -166,6 +167,17 @@ def _generate_random_password( def _populate_words(words_file, source_encoding="utf-8", unicode_normalize_form="NFKD"): words = set() + # if dictionary file does not exist, fall back to random words + if not Path(words_file).exists(): + chars = list(string.ascii_lowercase) + for _ in range(int(10000)): + length = random.randint(4, 8) + word = "".join(random.choices(chars, k=length)) + + words.add(word) + + return list(words) + with open(words_file, "rb") as f: for line in f: normalized_line: str = unicodedata.normalize( @@ -173,7 +185,9 @@ def _populate_words(words_file, source_encoding="utf-8", unicode_normalize_form= line.decode(source_encoding).lower().strip(), ) - is_ascii: bool = all([0 <= ord(char) <= 127 for char in normalized_line]) + is_ascii: bool = all( + [0 <= ord(char) <= 127 for char in normalized_line] + ) is_not_plural: bool = not normalized_line.endswith("'s") is_not_short: bool = len(normalized_line) >= 4 diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/README.md b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/README.md index cf040d960..e096c949f 100644 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/README.md +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/README.md @@ -9,6 +9,7 @@ A collection of scripts, configuration files, and Docker Compose files for initi - [Mounting folders in the worker containers](#mounting-folders-in-the-worker-containers) - 
[Mounting a folder on the host machine](#mounting-a-folder-on-the-host-machine) - [Mounting a folder on an NFS share](#mounting-a-folder-on-an-nfs-share) + - [Assigning multiple GPUs per worker](#assigning-multiple-gpus-per-worker) - [Initializing the deployment](#initializing-the-deployment) - [Starting the deployment](#starting-the-deployment) - [Using Docker Compose](#using-docker-compose) @@ -130,6 +131,22 @@ The `:ro` at the end will mount the NFS share as read-only within the worker con - dioptra-datasets:/datasets:ro ``` +### Assigning multiple GPUs per worker + +To assign multiple GPUs to a worker, modify the `NVIDIA_VISIBLE_DEVICES` environment variable that is set in the **tfgpu** and **pytorch-gpu** container blocks: + +```yaml + environment: + NVIDIA_VISIBLE_DEVICES: 0,1 +``` + +To allow a worker to use all available GPUs, set `NVIDIA_VISIBLE_DEVICES` to `all`: + +```yaml + environment: + NVIDIA_VISIBLE_DEVICES: all +``` + ## Initializing the deployment The `init-deployment.sh` script is the main tool for initializing the deployment and automates the following steps: diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/docker-compose.init.yml b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/docker-compose.init.yml index 6b734195b..bdb5a2d99 100644 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/docker-compose.init.yml +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/docker-compose.init.yml @@ -82,14 +82,16 @@ services: image: {{ container_image(cookiecutter.__containers.mc) }} hostname: {{ cookiecutter.__project_slug }}-mc entrypoint: - - "/bin/sh" + - "/bin/bash" {% if cookiecutter.__containers.networks -%} {{ service_networks(cookiecutter.__containers.networks)|indent(4) }} {% endif -%} volumes: - ./config/minio:/s3-policy:ro + - ./secrets:/secrets:ro - 
minio-certs:/root/.mc/certs/CAs:ro - init-repos:/init-repos:ro + - init-scripts:/scripts:ro argbash: image: {{ container_image(cookiecutter.__containers.argbash) }} diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/init-deployment.sh b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/init-deployment.sh index f9f26dc58..1922acc81 100755 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/init-deployment.sh +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/init-deployment.sh @@ -35,6 +35,8 @@ CONTAINER_SSL_DIR="/ssl" INIT_ARGBASH_SERVICE="argbash" INIT_DB_SERVICE="db" INIT_FRONTEND_SERVICE="frontend-build" +INIT_MC_SERVICE="mc" +INIT_MINIO_SERVICE="minio" INIT_MLFLOW_TRACKING_SSL_SERVICE="mlflow-tracking-ssl" INIT_NAMED_VOLUMES_SERVICE="named-volumes" INIT_NGINX_SSL_SERVICE="nginx-ssl" @@ -295,6 +297,22 @@ start_db_service() { docker_compose -f "${DOCKER_COMPOSE_INIT_YML}" up -d "${INIT_DB_SERVICE}" } +########################################################################################### +# Starts a MinIO service as a background process +# +# Globals: +# DOCKER_COMPOSE_INIT_YML +# INIT_MINIO_SERVICE +# Arguments: +# None +# Returns: +# None +########################################################################################### + +start_minio_service() { + docker_compose -f "${DOCKER_COMPOSE_INIT_YML}" up -d "${INIT_MINIO_SERVICE}" +} + ########################################################################################### # Enable/disable SSL for the Postgres database # @@ -487,7 +505,8 @@ init_named_volumes() { # Wrapper for the init-minio.sh utility script # # Globals: -# SCRIPT_DIRPATH +# DOCKER_COMPOSE_INIT_YML +# INIT_MC_SERVICE # Arguments: # None # Returns: @@ -495,12 +514,14 @@ init_named_volumes() { 
########################################################################################### init_minio() { - local script_path="${SCRIPT_DIRPATH}/scripts/init-minio.sh" + local args=( + "/scripts/init-minio.sh" + ) - if ! /usr/bin/env bash "${script_path}" "${SCRIPT_DIRPATH}"; then - log_error "Encountered an error when executing ${script_path}, exiting..." - exit 1 - fi + docker_compose -f "${DOCKER_COMPOSE_INIT_YML}" run \ + --rm \ + "${INIT_MC_SERVICE}" \ + "${args[@]}" } ########################################################################################### @@ -544,7 +565,9 @@ main() { init_scripts init_extra_ca_certificates init_named_volumes + start_minio_service init_minio + stop_services init_frontend start_db_service manage_postgres_ssl diff --git a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/scripts/init-minio.sh b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/scripts/init-minio.sh index 741589111..dec4ae70a 100644 --- a/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/scripts/init-minio.sh +++ b/cookiecutter-templates/cookiecutter-dioptra-deployment/{{cookiecutter.__project_slug}}/scripts/init-minio.sh @@ -17,26 +17,13 @@ # https://creativecommons.org/licenses/by/4.0/legalcode shopt -s extglob -set -euo pipefail +set -euo pipefail ${DEBUG:+-x} -BASEDIR="${1}" -MINIO_ENDPOINT_ALIAS="minio" +########################################################################################### +# Global parameters +########################################################################################### {% set minio_account_names = ["MLFLOW_TRACKING", "RESTAPI", "WORKER"] -%} -while IFS="=" read -r key value; do - case "${key}" in - {% for minio_account_name in minio_account_names -%} - "MINIO_{{ minio_account_name }}_USER") MINIO_{{ minio_account_name }}_USER="$value" ;; - "MINIO_{{ minio_account_name }}_PASSWORD") MINIO_{{ 
minio_account_name }}_PASSWORD="$value" ;; - "MINIO_{{ minio_account_name }}_POLICIES") MINIO_{{ minio_account_name }}_POLICIES="$value" ;; - {% endfor -%} - "MINIO_ROOT_USER") MINIO_ROOT_USER="$value" ;; - "MINIO_ROOT_PASSWORD") MINIO_ROOT_PASSWORD="$value" ;; - esac -done < "${BASEDIR}/secrets/{{ cookiecutter.__project_slug }}-minio-accounts.env" - -{{ cookiecutter.docker_compose_path }} -f ${BASEDIR}/docker-compose.init.yml up -d minio - {% set minio_policy_names = [ "builtin-plugins-readonly", "builtin-plugins-readwrite", @@ -48,16 +35,374 @@ done < "${BASEDIR}/secrets/{{ cookiecutter.__project_slug }}-minio-accounts.env" "workflow-downloadonly", "workflow-uploadonly", ] -%} -{{ cookiecutter.docker_compose_path }} -f ${BASEDIR}/docker-compose.init.yml run --rm mc -c "\ - mc alias set ${MINIO_ENDPOINT_ALIAS} http://{{ cookiecutter.__project_slug }}-minio:9000 ${MINIO_ROOT_USER} ${MINIO_ROOT_PASSWORD} && \ - mc mb --p ${MINIO_ENDPOINT_ALIAS}/plugins ${MINIO_ENDPOINT_ALIAS}/workflow ${MINIO_ENDPOINT_ALIAS}/mlflow-tracking && \ - {% for policy_name in minio_policy_names -%} - mc admin policy add ${MINIO_ENDPOINT_ALIAS} {{ policy_name }} /s3-policy/{{ policy_name }}-policy.json && \ - {% endfor -%} - {% for minio_account_name in minio_account_names -%} - mc admin user add ${MINIO_ENDPOINT_ALIAS} {{ '${MINIO_' ~ minio_account_name ~ '_USER}' }} {{ '${MINIO_' ~ minio_account_name ~ '_PASSWORD}' }} && \ - mc admin policy set ${MINIO_ENDPOINT_ALIAS} {{ '${MINIO_' ~ minio_account_name ~ '_POLICIES}' }} user={{ '${MINIO_' ~ minio_account_name ~ '_USER}' }} && \ - {% endfor -%} - mc mirror --overwrite --remove /init-repos/dioptra/task-plugins/dioptra_builtins ${MINIO_ENDPOINT_ALIAS}/plugins/dioptra_builtins" - -{{ cookiecutter.docker_compose_path }} -f ${BASEDIR}/docker-compose.init.yml down +INIT_REPOS_DIR="/init-repos" +LOGNAME="Init MinIO" +MINIO_ENDPOINT_ALIAS="minio" +SECRETS_DIR="/secrets" +S3_POLICY_DIR="/s3-policy" + +{% for minio_account_name in 
minio_account_names -%} +MINIO_{{ minio_account_name }}_USER="" +MINIO_{{ minio_account_name }}_PASSWORD="" +MINIO_{{ minio_account_name }}_POLICIES="" +{% endfor -%} +MINIO_ROOT_USER="" +MINIO_ROOT_PASSWORD="" + +# NOTE: Mutable global variable +policies_reduced=() + +########################################################################################### +# Print the script help message +# +# Globals: +# SCRIPT_CMDNAME +# Arguments: +# Error messages to log, a string +# Returns: +# None +########################################################################################### + +print_help() { + cat <<-HELPMESSAGE + Utility that configures the MinIO accounts and policies. + + Usage: init-minio.sh [-h|--help] + -h, --help: Prints help + HELPMESSAGE +} + +########################################################################################### +# Print an error log message to stderr +# +# Globals: +# LOGNAME +# Arguments: +# Error messages to log, one or more strings +# Returns: +# None +########################################################################################### + +log_error() { + echo "${LOGNAME}: ERROR -" "${@}" 1>&2 +} + +########################################################################################### +# Print an informational log message to stdout +# +# Globals: +# LOGNAME +# Arguments: +# Info messages to log, one or more strings +# Returns: +# None +########################################################################################### + +log_info() { + echo "${LOGNAME}: INFO -" "${@}" +} + +########################################################################################### +# Parse the script arguments +# +# Globals: +# None +# Arguments: +# Script arguments, an array +# Returns: +# None +########################################################################################### + +parse_args() { + while (({{ '"${#}"' }} > 0)); do + case "${1}" in + -h | --help) + print_help + exit 0 + ;; + *) + log_error 
"Unrecognized argument ${1}, exiting..." + exit 1 + ;; + esac + done +} + +########################################################################################### +# Load account credentials into environment variables +# +# Globals: +{% for minio_account_name in minio_account_names -%} +# MINIO_{{ minio_account_name }}_USER +# MINIO_{{ minio_account_name }}_PASSWORD +# MINIO_{{ minio_account_name }}_POLICIES +{% endfor -%} +# MINIO_ROOT_USER +# MINIO_ROOT_PASSWORD +# SECRETS_DIR +# Arguments: +# None +# Returns: +# None +########################################################################################### + +load_account_creds() { + while IFS="=" read -r key value; do + case "${key}" in + {% for minio_account_name in minio_account_names -%} + "MINIO_{{ minio_account_name }}_USER") MINIO_{{ minio_account_name }}_USER="$value" ;; + "MINIO_{{ minio_account_name }}_PASSWORD") MINIO_{{ minio_account_name }}_PASSWORD="$value" ;; + "MINIO_{{ minio_account_name }}_POLICIES") MINIO_{{ minio_account_name }}_POLICIES="$value" ;; + {% endfor -%} + "MINIO_ROOT_USER") MINIO_ROOT_USER="$value" ;; + "MINIO_ROOT_PASSWORD") MINIO_ROOT_PASSWORD="$value" ;; + esac + done < "${SECRETS_DIR}/{{ cookiecutter.__project_slug }}-minio-accounts.env" +} + +########################################################################################### +# Configure alias for accessing the MinIO endpoint +# +# Globals: +# MINIO_ENDPOINT_ALIAS +# MINIO_ROOT_USER +# MINIO_ROOT_PASSWORD +# Arguments: +# None +# Returns: +# None +########################################################################################### + +set_minio_alias() { + mc alias set \ + "${MINIO_ENDPOINT_ALIAS}" \ + "http://{{ cookiecutter.__project_slug }}-minio:9000" \ + "${MINIO_ROOT_USER}" \ + "${MINIO_ROOT_PASSWORD}" +} + +########################################################################################### +# Create the plugins, workflow, and mlflow-tracking buckets +# +# Globals: +# MINIO_ENDPOINT_ALIAS +# 
Arguments: +# None +# Returns: +# None +########################################################################################### + +create_buckets() { + mc mb --p \ + "${MINIO_ENDPOINT_ALIAS}/plugins" \ + "${MINIO_ENDPOINT_ALIAS}/workflow" \ + "${MINIO_ENDPOINT_ALIAS}/mlflow-tracking" +} + +########################################################################################### +# Create the MinIO accounts +# +# Globals: +# MINIO_ENDPOINT_ALIAS +{% for minio_account_name in minio_account_names -%} +# MINIO_{{ minio_account_name }}_USER +# MINIO_{{ minio_account_name }}_PASSWORD +{% endfor -%} +# Arguments: +# None +# Returns: +# None +########################################################################################### + +create_minio_accounts() { +{%- for minio_account_name in minio_account_names %} + mc admin user add \ + "${MINIO_ENDPOINT_ALIAS}" "{{ '${MINIO_' ~ minio_account_name ~ '_USER}' }}" \ + "{{ '${MINIO_' ~ minio_account_name ~ '_PASSWORD}' }}" +{% endfor -%} +} + +########################################################################################### +# Create MinIO access policies +# +# Globals: +# MINIO_ENDPOINT_ALIAS +# S3_POLICY_DIR +# Arguments: +# None +# Returns: +# None +########################################################################################### + +create_minio_policies() { +{%- for policy_name in minio_policy_names %} + mc admin policy create \ + "${MINIO_ENDPOINT_ALIAS}" "{{ policy_name }}" "${S3_POLICY_DIR}/{{ policy_name }}-policy.json" +{% endfor -%} +} + +########################################################################################### +# Remove elements from a comma-separated list +# +# Globals: +# LOGNAME +# Arguments: +# Input list, a comma-separated string +# Elements to match and delete, a comma-separated string +# Returns: +# List with elements removed, a comma-separated string +########################################################################################### + 
+remove_elements_from_comma_delimited() { + local input_list="${1}" + local elements_to_delete="${2}" + local merged_list="$input_list,$elements_to_delete" + local updated_list_newline_sep=$(echo "${merged_list//,/$'\n'}" | sort | uniq -u) + local updated_list_comma_sep="${updated_list_newline_sep//$'\n'/,}" + echo "${updated_list_comma_sep}" +} + +########################################################################################### +# Set the policies to attach in global policies_reduced after deduplicating repeat policies +# +# Globals: +# LOGNAME +# policies_reduced +# Arguments: +# The current list of policy to user mappings, an array +# The target policies for a user +# The target user +# Returns: +# None +########################################################################################### + +remove_repeat_policies() { + local name=$1[@] + local policies=$2 + local user=$3 + local lines=("${!name}") + + local found_policy=false + local reading_users=false + local select_policy="" + + # lines contains a policy -> user mapping + for i in "${lines[@]}"; do + # is this line of the input indicating a policy? + if grep -q " Policy: " <<<"$i"; then + # since we found a new policy, do not search for users anymore + found_policy=false + reading_users=false + select_policy="" + # is this policy one of the ones we are trying to add? 
+ for policy in $(echo "${policies}" | sed -n 1'p' | tr ',' '\n'); do + if [[ " Policy: ${policy}" == $i ]]; then + # the next few lines will be a list of users + found_policy=true + #save the policy so we know to remove it if necessary + select_policy="${policy}" + fi + done + continue # continue to next line of input + fi + + if [ "${found_policy}" = true ]; then + if [[ " User Mappings:" == "$i" ]]; then + # this indicates the start of a list of users + reading_users=true + continue # continue to next line of input + fi + fi + + if [ "${reading_users}" = true ] && [ "${found_policy}" = true ]; then + if [[ $(echo "$i" | sed 's/^ *//g' | sed 's/ *$//g') == "$user" ]]; then + # the user already has this policy! remove it from the list of policies + policies=$(remove_elements_from_comma_delimited $policies $select_policy) + fi + fi + done + + # policies should not be comma separated + local policies_newline_sep=$(echo "$policies" | sed -e "s/,/\n/g") + if [ ! -z "${policies_newline_sep}" ]; then + IFS=$'\n' read -r -d '' -a policies_reduced < <( echo "${policies_newline_sep}" && printf '\0' ) + fi +} + +########################################################################################### +# Attach MinIO policies to user accounts +# +# Globals: +# MINIO_ENDPOINT_ALIAS +{% for minio_account_name in minio_account_names -%} +# MINIO_{{ minio_account_name }}_USER +# MINIO_{{ minio_account_name }}_POLICIES +{% endfor -%} +# Arguments: +# None +# Returns: +# None +########################################################################################### + +attach_minio_policies() { + local attached_policies=() + + IFS=$'\n' read -r -d '' -a attached_policies < <( mc admin policy entities ${MINIO_ENDPOINT_ALIAS} && printf '\0' ) +{% for minio_account_name in minio_account_names %} + remove_repeat_policies attached_policies \ + "{{ '${MINIO_' ~ minio_account_name ~ '_POLICIES}' }}" "{{ '${MINIO_' ~ minio_account_name ~ '_USER}' }}" + + if [[ ! 
-z "${policies_reduced[@]}" ]]; then + mc admin policy attach \ + "${MINIO_ENDPOINT_ALIAS}" \ + "${policies_reduced[@]}" \ + --user="{{ '${MINIO_' ~ minio_account_name ~ '_USER}' }}" + fi +{% endfor -%} +} + +########################################################################################### +# Synchronize built-in plugins to plugins bucket +# +# Globals: +# INIT_REPOS_DIR +# MINIO_ENDPOINT_ALIAS +# Arguments: +# None +# Returns: +# None +########################################################################################### + +sync_builtin_plugins() { + mc mirror --overwrite --remove \ + "${INIT_REPOS_DIR}/dioptra/task-plugins/dioptra_builtins" \ + "${MINIO_ENDPOINT_ALIAS}/plugins/dioptra_builtins" +} + +########################################################################################### +# The top-level function in the script +# +# Globals: +# None +# Arguments: +# None +# Returns: +# None +########################################################################################### + +main() { + parse_args "${@}" + load_account_creds + set_minio_alias + create_buckets + create_minio_accounts + create_minio_policies + attach_minio_policies + sync_builtin_plugins +} + +########################################################################################### +# Main script +########################################################################################### + +main "${@}" diff --git a/docker/Dockerfile.mlflow-tracking b/docker/Dockerfile.mlflow-tracking index b30a44265..b87f6d57b 100644 --- a/docker/Dockerfile.mlflow-tracking +++ b/docker/Dockerfile.mlflow-tracking @@ -23,9 +23,11 @@ FROM ubuntu:focal AS copy-files +ARG TARGETARCH + COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-mlflow-tracking-requirements.txt 
/files/mlflow-tracking-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-${TARGETARCH}-py3.9-mlflow-tracking-requirements.txt /files/mlflow-tracking-requirements.txt ###################################################################################################### # Base images @@ -148,13 +150,17 @@ RUN mkdir -p /shellscripts && \ FROM certs-base AS install-awscli -ARG AWSCLI_DOWNLOAD_URL=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip +ARG TARGETARCH +ARG AWSCLI_DOWNLOAD_BASE_URL=https://awscli.amazonaws.com ARG AWSCLI_INSTALL_DIR=/opt/aws-cli ARG AWSCLI_BIN_DIR=/usr/local/bin +SHELL ["/bin/bash", "-c"] + RUN cd /tmp && \ - wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_URL} && \ - unzip awscliv2.zip && \ + AWSCLIARCH=${TARGETARCH/arm/aarch} && \ + wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_BASE_URL}/awscli-exe-linux-${AWSCLIARCH/amd/x86_}.zip && \ + unzip /tmp/awscliv2.zip && \ rm /tmp/awscliv2.zip && \ ./aws/install --install-dir ${AWSCLI_INSTALL_DIR} --bin-dir ${AWSCLI_BIN_DIR} && \ rm -rf /tmp/aws diff --git a/docker/Dockerfile.pytorch-cpu b/docker/Dockerfile.pytorch-cpu index b5beb93f9..046481e54 100644 --- a/docker/Dockerfile.pytorch-cpu +++ b/docker/Dockerfile.pytorch-cpu @@ -23,9 +23,11 @@ FROM ubuntu:focal AS copy-files +ARG TARGETARCH + COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-pytorch-cpu-requirements.txt /files/pytorch-cpu-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-${TARGETARCH}-py3.9-pytorch-cpu-requirements.txt /files/pytorch-cpu-requirements.txt ###################################################################################################### # Base images @@ -160,13 +162,17 @@ RUN mkdir -p /shellscripts && \ FROM certs-base AS install-awscli -ARG 
AWSCLI_DOWNLOAD_URL=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip +ARG TARGETARCH +ARG AWSCLI_DOWNLOAD_BASE_URL=https://awscli.amazonaws.com ARG AWSCLI_INSTALL_DIR=/opt/aws-cli ARG AWSCLI_BIN_DIR=/usr/local/bin +SHELL ["/bin/bash", "-c"] + RUN cd /tmp && \ - wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_URL} && \ - unzip awscliv2.zip && \ + AWSCLIARCH=${TARGETARCH/arm/aarch} && \ + wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_BASE_URL}/awscli-exe-linux-${AWSCLIARCH/amd/x86_}.zip && \ + unzip /tmp/awscliv2.zip && \ rm /tmp/awscliv2.zip && \ ./aws/install --install-dir ${AWSCLI_INSTALL_DIR} --bin-dir ${AWSCLI_BIN_DIR} && \ rm -rf /tmp/aws diff --git a/docker/Dockerfile.pytorch-gpu b/docker/Dockerfile.pytorch-gpu index 4719ef68b..83c901184 100644 --- a/docker/Dockerfile.pytorch-gpu +++ b/docker/Dockerfile.pytorch-gpu @@ -25,7 +25,7 @@ FROM nvidia/cuda:11.3.1-cudnn8-runtime-ubuntu20.04 AS copy-files COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-pytorch-gpu-requirements.txt /files/pytorch-gpu-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-amd64-py3.9-pytorch-gpu-requirements.txt /files/pytorch-gpu-requirements.txt ###################################################################################################### # Base images diff --git a/docker/Dockerfile.restapi b/docker/Dockerfile.restapi index 89a40f77b..608891072 100644 --- a/docker/Dockerfile.restapi +++ b/docker/Dockerfile.restapi @@ -23,11 +23,13 @@ FROM ubuntu:focal AS copy-files +ARG TARGETARCH + COPY --chown=root:root src/migrations /files/migrations COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf COPY --chown=root:root --chmod=0644 
docker/configs/gunicorn.restapi.conf.py /files/gunicorn.restapi.conf.py -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-restapi-requirements.txt /files/restapi-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-${TARGETARCH}-py3.9-restapi-requirements.txt /files/restapi-requirements.txt COPY --chown=root:root --chmod=0755 wsgi.py /files/wsgi.py RUN chmod 0755 /files/migrations && \ @@ -164,13 +166,17 @@ RUN mkdir -p /shellscripts && \ FROM certs-base AS install-awscli -ARG AWSCLI_DOWNLOAD_URL=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip +ARG TARGETARCH +ARG AWSCLI_DOWNLOAD_BASE_URL=https://awscli.amazonaws.com ARG AWSCLI_INSTALL_DIR=/opt/aws-cli ARG AWSCLI_BIN_DIR=/usr/local/bin +SHELL ["/bin/bash", "-c"] + RUN cd /tmp && \ - wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_URL} && \ - unzip awscliv2.zip && \ + AWSCLIARCH=${TARGETARCH/arm/aarch} && \ + wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_BASE_URL}/awscli-exe-linux-${AWSCLIARCH/amd/x86_}.zip && \ + unzip /tmp/awscliv2.zip && \ rm /tmp/awscliv2.zip && \ ./aws/install --install-dir ${AWSCLI_INSTALL_DIR} --bin-dir ${AWSCLI_BIN_DIR} && \ rm -rf /tmp/aws diff --git a/docker/Dockerfile.tensorflow2-cpu b/docker/Dockerfile.tensorflow2-cpu index b616b0251..7637d978a 100644 --- a/docker/Dockerfile.tensorflow2-cpu +++ b/docker/Dockerfile.tensorflow2-cpu @@ -23,9 +23,11 @@ FROM ubuntu:focal AS copy-files +ARG TARGETARCH + COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt /files/tensorflow2-cpu-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-${TARGETARCH}-py3.9-tensorflow2-cpu-requirements.txt /files/tensorflow2-cpu-requirements.txt 
###################################################################################################### # Base images @@ -160,13 +162,17 @@ RUN mkdir -p /shellscripts && \ FROM certs-base AS install-awscli -ARG AWSCLI_DOWNLOAD_URL=https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip +ARG TARGETARCH +ARG AWSCLI_DOWNLOAD_BASE_URL=https://awscli.amazonaws.com ARG AWSCLI_INSTALL_DIR=/opt/aws-cli ARG AWSCLI_BIN_DIR=/usr/local/bin +SHELL ["/bin/bash", "-c"] + RUN cd /tmp && \ - wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_URL} && \ - unzip awscliv2.zip && \ + AWSCLIARCH=${TARGETARCH/arm/aarch} && \ + wget -qO /tmp/awscliv2.zip ${AWSCLI_DOWNLOAD_BASE_URL}/awscli-exe-linux-${AWSCLIARCH/amd/x86_}.zip && \ + unzip /tmp/awscliv2.zip && \ rm /tmp/awscliv2.zip && \ ./aws/install --install-dir ${AWSCLI_INSTALL_DIR} --bin-dir ${AWSCLI_BIN_DIR} && \ rm -rf /tmp/aws diff --git a/docker/Dockerfile.tensorflow2-gpu b/docker/Dockerfile.tensorflow2-gpu index fb2483d7b..175e7d6bd 100644 --- a/docker/Dockerfile.tensorflow2-gpu +++ b/docker/Dockerfile.tensorflow2-gpu @@ -25,7 +25,7 @@ FROM nvidia/cuda:12.0.0-cudnn8-runtime-ubuntu20.04 AS copy-files COPY --chown=root:root --chmod=0644 docker/configs/aws-config /files/aws-config COPY --chown=root:root --chmod=0644 docker/configs/build.pip.conf /files/build.pip.conf -COPY --chown=root:root --chmod=0644 docker/requirements/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt /files/tensorflow2-gpu-requirements.txt +COPY --chown=root:root --chmod=0644 docker/requirements/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt /files/tensorflow2-gpu-requirements.txt ###################################################################################################### # Base images diff --git a/docker/requirements/linux-x86_64-py3.9-mlflow-tracking-requirements.txt b/docker/requirements/linux-amd64-py3.9-mlflow-tracking-requirements.txt similarity index 84% rename from docker/requirements/linux-x86_64-py3.9-mlflow-tracking-requirements.txt rename to 
docker/requirements/linux-amd64-py3.9-mlflow-tracking-requirements.txt index 4d730a1bf..07ee149cc 100644 --- a/docker/requirements/linux-x86_64-py3.9-mlflow-tracking-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-mlflow-tracking-requirements.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --config=pyproject.toml --output-file=venvs/linux-x86_64-py3.9-mlflow-tracking-requirements.txt docker/pip-tools/mlflow-tracking-requirements.in +# pip-compile --output-file=venvs/linux-amd64-py3.9-mlflow-tracking-requirements.txt docker/pip-tools/mlflow-tracking-requirements.in # -alembic==1.11.1 +alembic==1.12.0 # via mlflow blinker==1.6.2 # via flask -boto3==1.28.16 +boto3==1.28.57 # via -r docker/pip-tools/mlflow-tracking-requirements.in -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -18,26 +18,26 @@ certifi==2023.7.22 # via requests charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # databricks-cli # flask # mlflow cloudpickle==2.2.1 # via mlflow -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow docker==6.1.3 # via mlflow entrypoints==0.4 # via mlflow -flask==2.3.2 +flask==2.3.3 # via # mlflow # prometheus-flask-exporter gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -66,7 +66,7 @@ markupsafe==2.1.3 # werkzeug mlflow==1.30.1 # via -r docker/pip-tools/mlflow-tracking-requirements.in -numpy==1.25.2 +numpy==1.26.0 # via # mlflow # pandas @@ -83,9 +83,9 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/mlflow-tracking-requirements.in pyjwt==2.8.0 # via databricks-cli @@ -108,9 +108,9 @@ requests==2.31.0 # databricks-cli # docker # mlflow -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 
-scipy==1.11.1 +scipy==1.11.3 # via mlflow simplejson==3.19.1 # via -r docker/pip-tools/mlflow-tracking-requirements.in @@ -119,7 +119,7 @@ six==1.16.0 # databricks-cli # python-dateutil # querystring-parser -smmap==5.0.0 +smmap==5.0.1 # via gitdb sqlalchemy==1.4.49 # via @@ -129,7 +129,7 @@ sqlparse==0.4.4 # via mlflow tabulate==0.9.0 # via databricks-cli -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via alembic urllib3==1.26.16 # via @@ -137,11 +137,11 @@ urllib3==1.26.16 # databricks-cli # docker # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker -werkzeug==2.3.6 +werkzeug==2.3.7 # via flask -zipp==3.16.2 +zipp==3.17.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/docker/requirements/linux-x86_64-py3.9-pytorch-cpu-requirements.txt b/docker/requirements/linux-amd64-py3.9-pytorch-cpu-requirements.txt similarity index 89% rename from docker/requirements/linux-x86_64-py3.9-pytorch-cpu-requirements.txt rename to docker/requirements/linux-amd64-py3.9-pytorch-cpu-requirements.txt index 8c45aec62..41ccf04d9 100644 --- a/docker/requirements/linux-x86_64-py3.9-pytorch-cpu-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-pytorch-cpu-requirements.txt @@ -2,20 +2,20 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-pytorch-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-pytorch.in +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-pytorch-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-pytorch.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -absl-py==1.4.0 +absl-py==2.0.0 # via tensorboard 
-adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via fsspec aiosignal==1.3.1 # via aiohttp -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate @@ -28,7 +28,7 @@ antlr4-python3-runtime==4.9.3 # omegaconf appdirs==1.4.4 # via black -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -39,9 +39,9 @@ attrs==23.1.0 # referencing black==21.4b2 # via detectron2 -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -51,13 +51,13 @@ cachetools==5.3.1 # via google-auth certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # black # dask @@ -78,7 +78,7 @@ cloudpickle==2.2.1 # prefect cma==3.3.0 # via -r docker/pip-tools/worker-requirements.in -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib croniter==1.4.1 # via prefect @@ -86,19 +86,19 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow decorator==4.4.2 # via moviepy detectron2==0.6+cpu ; sys_platform == "linux" and python_version == "3.9" and platform_machine == "x86_64" # via -r requirements-dev-pytorch.in -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -114,7 +114,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -125,9 +124,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) 
flask-restx==1.1.0 # via @@ -139,13 +136,13 @@ flask-sqlalchemy==2.5.1 # flask-migrate flask-wtf==1.1.1 # via dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning @@ -155,9 +152,9 @@ fvcore==0.1.5.post20221221 # via detectron2 gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -165,7 +162,7 @@ google-auth-oauthlib==1.0.0 # via tensorboard greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via tensorboard gunicorn==20.1.0 # via mlflow @@ -180,12 +177,12 @@ idna==3.4 # email-validator # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # moviepy # scikit-image -imageio-ffmpeg==0.4.8 +imageio-ffmpeg==0.4.9 # via moviepy imgaug==0.4.0 # via dioptra (pyproject.toml) @@ -196,14 +193,12 @@ importlib-metadata==5.2.0 # gym # markdown # mlflow -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) iopath==0.1.9 # via # detectron2 @@ -220,15 +215,15 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx jsonschema-specifications==2023.7.1 # via jsonschema -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -261,7 +256,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -274,7 +269,7 @@ mlflow==1.30.1 # via dioptra (pyproject.toml) moviepy==1.0.3 # via gym -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -282,7 +277,7 @@ 
multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) mypy-extensions==1.0.0 # via @@ -290,7 +285,7 @@ mypy-extensions==1.0.0 # prefect networkx==3.1 # via scikit-image -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # cma @@ -325,7 +320,7 @@ omegaconf==2.3.0 # via # detectron2 # hydra-core -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -343,12 +338,11 @@ packaging==21.3 # prefect # pytorch-lightning # scikit-image - # torchmetrics pandas==1.5.3 # via # dioptra (pyproject.toml) # mlflow -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via dioptra (pyproject.toml) @@ -356,7 +350,7 @@ pathspec==0.11.2 # via black pendulum==2.1.2 # via prefect -pillow==10.0.0 +pillow==10.0.1 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -366,7 +360,7 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -portalocker==2.7.0 +portalocker==2.8.2 # via iopath prefect==1.4.1 # via dioptra (pyproject.toml) @@ -376,15 +370,15 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard psutil==5.9.5 # via distributed -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/worker-requirements.in -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -392,7 +386,7 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -402,16 +396,16 @@ pydot==1.4.2 # via detectron2 pygame==2.1.0 # via gym -pygments==2.15.1 +pygments==2.16.1 # via rich pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # packaging # pydot -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -451,15 +445,15 @@ 
pyyaml==6.0.1 # yacs querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications -regex==2023.6.3 +regex==2023.8.8 # via black requests==2.31.0 # via @@ -474,9 +468,9 @@ requests==2.31.0 # tensorboard requests-oauthlib==1.3.1 # via google-auth-oauthlib -rich==13.5.2 +rich==13.5.3 # via -r docker/pip-tools/worker-requirements.in -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -484,7 +478,7 @@ rq==1.15.1 # via dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -492,7 +486,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) @@ -508,11 +502,11 @@ six==1.16.0 # via # adversarial-robustness-toolbox # databricks-cli - # google-auth # imgaug # python-dateutil # querystring-parser -smmap==5.0.0 + # tensorboard +smmap==5.0.1 # via gitdb sortedcontainers==2.4.0 # via distributed @@ -536,7 +530,7 @@ tabulate==0.9.0 # prefect tblib==2.0.0 # via distributed -tensorboard==2.13.0 +tensorboard==2.14.1 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -550,7 +544,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image toml==0.10.2 # via @@ -570,13 +564,13 @@ torch==1.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and pyt # torchvision torchaudio==0.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3+cpu ; (sys_platform == "win32" or 
sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via distributed -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # detectron2 @@ -585,7 +579,7 @@ tqdm==4.65.0 # moviepy # proglog # pytorch-lightning -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # dioptra (pyproject.toml) @@ -598,10 +592,9 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # prefect # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -610,8 +603,6 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 - # via tensorboard wtforms[email]==3.0.1 # via # dioptra (pyproject.toml) @@ -624,7 +615,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/docker/requirements/linux-x86_64-py3.9-pytorch-gpu-requirements.txt b/docker/requirements/linux-amd64-py3.9-pytorch-gpu-requirements.txt similarity index 89% rename from docker/requirements/linux-x86_64-py3.9-pytorch-gpu-requirements.txt rename to docker/requirements/linux-amd64-py3.9-pytorch-gpu-requirements.txt index 03188ef2d..e2f8271cc 100644 --- a/docker/requirements/linux-x86_64-py3.9-pytorch-gpu-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-pytorch-gpu-requirements.txt @@ -2,20 +2,20 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-pytorch-gpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-pytorch-gpu.in +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-pytorch-gpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml 
requirements-dev-pytorch-gpu.in # --find-links https://download.pytorch.org/whl/cu113/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cu113/torch1.10/index.html -absl-py==1.4.0 +absl-py==2.0.0 # via tensorboard -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via fsspec aiosignal==1.3.1 # via aiohttp -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate @@ -28,7 +28,7 @@ antlr4-python3-runtime==4.9.3 # omegaconf appdirs==1.4.4 # via black -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -39,9 +39,9 @@ attrs==23.1.0 # referencing black==21.4b2 # via detectron2 -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -51,13 +51,13 @@ cachetools==5.3.1 # via google-auth certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # black # dask @@ -78,7 +78,7 @@ cloudpickle==2.2.1 # prefect cma==3.3.0 # via -r docker/pip-tools/worker-requirements.in -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib croniter==1.4.1 # via prefect @@ -86,19 +86,19 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow decorator==4.4.2 # via moviepy detectron2==0.6+cu113 ; sys_platform == "linux" and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch-gpu.in -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -114,7 +114,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # 
flask-migrate # flask-restx # flask-sqlalchemy @@ -125,9 +124,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -139,13 +136,13 @@ flask-sqlalchemy==2.5.1 # flask-migrate flask-wtf==1.1.1 # via dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning @@ -155,9 +152,9 @@ fvcore==0.1.5.post20221221 # via detectron2 gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -165,7 +162,7 @@ google-auth-oauthlib==1.0.0 # via tensorboard greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via tensorboard gunicorn==20.1.0 # via mlflow @@ -180,12 +177,12 @@ idna==3.4 # email-validator # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # moviepy # scikit-image -imageio-ffmpeg==0.4.8 +imageio-ffmpeg==0.4.9 # via moviepy imgaug==0.4.0 # via dioptra (pyproject.toml) @@ -196,14 +193,12 @@ importlib-metadata==5.2.0 # gym # markdown # mlflow -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) iopath==0.1.9 # via # detectron2 @@ -220,15 +215,15 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx jsonschema-specifications==2023.7.1 # via jsonschema -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -261,7 +256,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # 
via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -274,7 +269,7 @@ mlflow==1.30.1 # via dioptra (pyproject.toml) moviepy==1.0.3 # via gym -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -282,7 +277,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) mypy-extensions==1.0.0 # via @@ -290,7 +285,7 @@ mypy-extensions==1.0.0 # prefect networkx==3.1 # via scikit-image -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # cma @@ -325,7 +320,7 @@ omegaconf==2.3.0 # via # detectron2 # hydra-core -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -343,12 +338,11 @@ packaging==21.3 # prefect # pytorch-lightning # scikit-image - # torchmetrics pandas==1.5.3 # via # dioptra (pyproject.toml) # mlflow -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via dioptra (pyproject.toml) @@ -356,7 +350,7 @@ pathspec==0.11.2 # via black pendulum==2.1.2 # via prefect -pillow==10.0.0 +pillow==10.0.1 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -366,7 +360,7 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -portalocker==2.7.0 +portalocker==2.8.2 # via iopath prefect==1.4.1 # via dioptra (pyproject.toml) @@ -376,15 +370,15 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard psutil==5.9.5 # via distributed -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/worker-requirements.in -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -392,7 +386,7 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -402,16 +396,16 @@ pydot==1.4.2 # via detectron2 
pygame==2.1.0 # via gym -pygments==2.15.1 +pygments==2.16.1 # via rich pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # packaging # pydot -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -451,15 +445,15 @@ pyyaml==6.0.1 # yacs querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications -regex==2023.6.3 +regex==2023.8.8 # via black requests==2.31.0 # via @@ -474,9 +468,9 @@ requests==2.31.0 # tensorboard requests-oauthlib==1.3.1 # via google-auth-oauthlib -rich==13.5.2 +rich==13.5.3 # via -r docker/pip-tools/worker-requirements.in -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -484,7 +478,7 @@ rq==1.15.1 # via dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -492,7 +486,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) @@ -508,11 +502,11 @@ six==1.16.0 # via # adversarial-robustness-toolbox # databricks-cli - # google-auth # imgaug # python-dateutil # querystring-parser -smmap==5.0.0 + # tensorboard +smmap==5.0.1 # via gitdb sortedcontainers==2.4.0 # via distributed @@ -536,7 +530,7 @@ tabulate==0.9.0 # prefect tblib==2.0.0 # via distributed -tensorboard==2.13.0 +tensorboard==2.14.1 # via # -r docker/pip-tools/worker-requirements.in # detectron2 @@ -550,7 +544,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image toml==0.10.2 # via @@ -570,13 +564,13 @@ torch==1.10.2+cu113 ; sys_platform == "linux" and python_version == "3.9" and (p # torchvision torchaudio==0.10.2+cu113 ; sys_platform == "linux" and python_version == "3.9" 
and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch-gpu.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3+cu113 ; sys_platform == "linux" and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch-gpu.in -tornado==6.3.2 +tornado==6.3.3 # via distributed -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # detectron2 @@ -585,7 +579,7 @@ tqdm==4.65.0 # moviepy # proglog # pytorch-lightning -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # dioptra (pyproject.toml) @@ -598,10 +592,9 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # prefect # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -610,8 +603,6 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 - # via tensorboard wtforms[email]==3.0.1 # via # dioptra (pyproject.toml) @@ -624,7 +615,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/docker/requirements/linux-x86_64-py3.9-restapi-requirements.txt b/docker/requirements/linux-amd64-py3.9-restapi-requirements.txt similarity index 87% rename from docker/requirements/linux-x86_64-py3.9-restapi-requirements.txt rename to docker/requirements/linux-amd64-py3.9-restapi-requirements.txt index ebbde169a..64fb3a3ce 100644 --- a/docker/requirements/linux-x86_64-py3.9-restapi-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-restapi-requirements.txt @@ -2,24 +2,24 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --output-file=venvs/linux-x86_64-py3.9-restapi-requirements.txt docker/pip-tools/restapi-requirements.in pyproject.toml +# pip-compile 
--output-file=venvs/linux-amd64-py3.9-restapi-requirements.txt docker/pip-tools/restapi-requirements.in pyproject.toml # -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate # mlflow aniso8601==9.0.1 # via flask-restx -async-timeout==4.0.2 +async-timeout==4.0.3 # via redis attrs==23.1.0 # via # jsonschema # referencing -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -27,7 +27,7 @@ certifi==2023.7.22 # via requests charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # databricks-cli # dioptra (pyproject.toml) @@ -36,9 +36,9 @@ click==8.1.6 # rq cloudpickle==2.2.1 # via mlflow -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via mlflow @@ -52,7 +52,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -63,9 +62,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -79,7 +76,7 @@ flask-wtf==1.1.1 # via dioptra (pyproject.toml) gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -96,9 +93,7 @@ importlib-metadata==5.2.0 # flask # mlflow injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) itsdangerous==2.1.2 # via # flask @@ -109,7 +104,7 @@ jmespath==1.0.1 # via # boto3 # botocore -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx @@ -128,9 +123,9 @@ marshmallow==3.20.1 # flask-accepts mlflow==1.30.1 # via dioptra (pyproject.toml) -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) 
-numpy==1.25.2 +numpy==1.26.0 # via # dioptra (pyproject.toml) # mlflow @@ -149,15 +144,15 @@ pandas==1.5.3 # mlflow passlib==1.7.4 # via dioptra (pyproject.toml) -pillow==10.0.0 +pillow==10.0.1 # via -r docker/pip-tools/restapi-requirements.in prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/restapi-requirements.in pyjwt==2.8.0 # via databricks-cli @@ -181,11 +176,11 @@ pyyaml==6.0.1 # mlflow querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -195,15 +190,15 @@ requests==2.31.0 # dioptra (pyproject.toml) # docker # mlflow -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing rq==1.15.1 # via dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 -scipy==1.11.1 +scipy==1.11.3 # via # dioptra (pyproject.toml) # mlflow @@ -214,7 +209,7 @@ six==1.16.0 # databricks-cli # python-dateutil # querystring-parser -smmap==5.0.0 +smmap==5.0.1 # via gitdb sqlalchemy==1.4.49 # via @@ -228,7 +223,7 @@ structlog==23.1.0 # via dioptra (pyproject.toml) tabulate==0.9.0 # via databricks-cli -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # dioptra (pyproject.toml) @@ -238,7 +233,7 @@ urllib3==1.26.16 # databricks-cli # docker # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -250,7 +245,7 @@ wtforms[email]==3.0.1 # via # dioptra (pyproject.toml) # flask-wtf -zipp==3.16.2 +zipp==3.17.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/docker/requirements/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt b/docker/requirements/linux-amd64-py3.9-tensorflow2-cpu-requirements.txt similarity index 88% rename 
from docker/requirements/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt rename to docker/requirements/linux-amd64-py3.9-tensorflow2-cpu-requirements.txt index 6b020283b..bef3efa37 100644 --- a/docker/requirements/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-tensorflow2-cpu-requirements.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-tensorflow.in +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-tensorflow2-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-tensorflow.in # -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow-cpu -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate @@ -19,15 +19,15 @@ aniso8601==9.0.1 # via flask-restx astunparse==1.6.3 # via tensorflow-cpu -async-timeout==4.0.2 +async-timeout==4.0.3 # via redis attrs==23.1.0 # via # jsonschema # referencing -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -37,11 +37,11 @@ cachetools==5.3.1 # via google-auth certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # dask # databricks-cli @@ -60,7 +60,7 @@ cloudpickle==2.2.1 # prefect cma==3.3.0 # via -r docker/pip-tools/worker-requirements.in -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib croniter==1.4.1 # via prefect @@ -68,17 +68,17 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 
+dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow decorator==4.4.2 # via moviepy -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -94,7 +94,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -105,9 +104,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -121,17 +118,17 @@ flask-wtf==1.1.1 # via dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow-cpu -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib -fsspec==2023.6.0 +fsspec==2023.9.2 # via dask gast==0.4.0 # via tensorflow-cpu gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -141,7 +138,7 @@ google-pasta==0.2.0 # via tensorflow-cpu greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via # tensorboard # tensorflow-cpu @@ -157,12 +154,12 @@ idna==3.4 # via # email-validator # requests -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # moviepy # scikit-image -imageio-ffmpeg==0.4.8 +imageio-ffmpeg==0.4.9 # via moviepy imgaug==0.4.0 # via dioptra (pyproject.toml) @@ -174,19 +171,17 @@ importlib-metadata==5.2.0 # jax # markdown # mlflow -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow-cpu jinja2==3.1.2 # via @@ -196,9 +191,9 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via 
scikit-learn -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx @@ -206,7 +201,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema keras==2.12.0 # via tensorflow-cpu -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -237,7 +232,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -245,17 +240,17 @@ matplotlib==3.7.2 # pycocotools mdurl==0.1.2 # via markdown-it-py -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # via jax mlflow==1.30.1 # via dioptra (pyproject.toml) moviepy==1.0.3 # via gym -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) mypy-extensions==1.0.0 # via prefect @@ -293,7 +288,7 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -317,13 +312,13 @@ pandas==1.5.3 # via # dioptra (pyproject.toml) # mlflow -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via dioptra (pyproject.toml) pendulum==2.1.2 # via prefect -pillow==10.0.0 +pillow==10.0.1 # via # -r docker/pip-tools/worker-requirements.in # imageio @@ -338,16 +333,16 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard # tensorflow-cpu psutil==5.9.5 # via distributed -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/worker-requirements.in -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -355,21 +350,21 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via -r docker/pip-tools/worker-requirements.in pycparser==2.21 # via cffi pygame==2.1.0 # via gym -pygments==2.15.1 
+pygments==2.16.1 # via rich pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # packaging -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -403,11 +398,11 @@ pyyaml==6.0.1 # prefect querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -423,9 +418,9 @@ requests==2.31.0 # tensorboard requests-oauthlib==1.3.1 # via google-auth-oauthlib -rich==13.5.2 +rich==13.5.3 # via -r docker/pip-tools/worker-requirements.in -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -433,7 +428,7 @@ rq==1.15.1 # via dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -441,7 +436,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) @@ -459,13 +454,12 @@ six==1.16.0 # adversarial-robustness-toolbox # astunparse # databricks-cli - # google-auth # google-pasta # imgaug # python-dateutil # querystring-parser # tensorflow-cpu -smmap==5.0.0 +smmap==5.0.1 # via gitdb sortedcontainers==2.4.0 # via distributed @@ -497,7 +491,7 @@ tensorflow-cpu==2.12.1 ; (sys_platform == "linux" or sys_platform == "win32" or # via -r requirements-dev-tensorflow.in tensorflow-estimator==2.12.0 # via tensorflow-cpu -tensorflow-io-gcs-filesystem==0.32.0 +tensorflow-io-gcs-filesystem==0.34.0 # via tensorflow-cpu termcolor==2.3.0 # via tensorflow-cpu @@ -505,7 +499,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image toml==0.10.2 # via prefect @@ -514,9 +508,9 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via distributed -tqdm==4.65.0 
+tqdm==4.66.1 # via # adversarial-robustness-toolbox # moviepy @@ -532,10 +526,9 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # prefect # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -544,7 +537,7 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # tensorboard @@ -556,7 +549,7 @@ wtforms[email]==3.0.1 # flask-wtf zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/docker/requirements/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt b/docker/requirements/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt similarity index 88% rename from docker/requirements/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt rename to docker/requirements/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt index 4c78c2ae3..d93c2f278 100644 --- a/docker/requirements/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt +++ b/docker/requirements/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-tensorflow-gpu.in +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-tensorflow2-gpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-tensorflow-gpu.in # -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate @@ -19,15 +19,15 @@ aniso8601==9.0.1 # via flask-restx astunparse==1.6.3 # via tensorflow -async-timeout==4.0.2 
+async-timeout==4.0.3 # via redis attrs==23.1.0 # via # jsonschema # referencing -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -37,11 +37,11 @@ cachetools==5.3.1 # via google-auth certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # dask # databricks-cli @@ -60,7 +60,7 @@ cloudpickle==2.2.1 # prefect cma==3.3.0 # via -r docker/pip-tools/worker-requirements.in -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib croniter==1.4.1 # via prefect @@ -68,17 +68,17 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow decorator==4.4.2 # via moviepy -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -94,7 +94,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -105,9 +104,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -121,17 +118,17 @@ flask-wtf==1.1.1 # via dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib -fsspec==2023.6.0 +fsspec==2023.9.2 # via dask gast==0.4.0 # via tensorflow gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -141,7 +138,7 @@ google-pasta==0.2.0 # via tensorflow greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via # 
tensorboard # tensorflow @@ -157,12 +154,12 @@ idna==3.4 # via # email-validator # requests -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # moviepy # scikit-image -imageio-ffmpeg==0.4.8 +imageio-ffmpeg==0.4.9 # via moviepy imgaug==0.4.0 # via dioptra (pyproject.toml) @@ -174,19 +171,17 @@ importlib-metadata==5.2.0 # jax # markdown # mlflow -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow jinja2==3.1.2 # via @@ -196,9 +191,9 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx @@ -206,7 +201,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema keras==2.12.0 # via tensorflow -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -237,7 +232,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -245,17 +240,17 @@ matplotlib==3.7.2 # pycocotools mdurl==0.1.2 # via markdown-it-py -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # via jax mlflow==1.30.1 # via dioptra (pyproject.toml) moviepy==1.0.3 # via gym -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) mypy-extensions==1.0.0 # via prefect @@ -293,7 +288,7 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -317,13 +312,13 @@ pandas==1.5.3 # via # dioptra (pyproject.toml) # mlflow -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via dioptra (pyproject.toml) pendulum==2.1.2 # via prefect -pillow==10.0.0 
+pillow==10.0.1 # via # -r docker/pip-tools/worker-requirements.in # imageio @@ -338,16 +333,16 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard # tensorflow psutil==5.9.5 # via distributed -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/worker-requirements.in -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -355,21 +350,21 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via -r docker/pip-tools/worker-requirements.in pycparser==2.21 # via cffi pygame==2.1.0 # via gym -pygments==2.15.1 +pygments==2.16.1 # via rich pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # packaging -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -403,11 +398,11 @@ pyyaml==6.0.1 # prefect querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -423,9 +418,9 @@ requests==2.31.0 # tensorboard requests-oauthlib==1.3.1 # via google-auth-oauthlib -rich==13.5.2 +rich==13.5.3 # via -r docker/pip-tools/worker-requirements.in -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -433,7 +428,7 @@ rq==1.15.1 # via dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -441,7 +436,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) @@ -459,13 +454,12 @@ six==1.16.0 # adversarial-robustness-toolbox # astunparse # databricks-cli - # google-auth # google-pasta # imgaug # python-dateutil # querystring-parser # tensorflow 
-smmap==5.0.0 +smmap==5.0.1 # via gitdb sortedcontainers==2.4.0 # via distributed @@ -497,7 +491,7 @@ tensorflow==2.12.1 ; sys_platform == "linux" and (platform_machine == "x86_64" o # via -r requirements-dev-tensorflow-gpu.in tensorflow-estimator==2.12.0 # via tensorflow -tensorflow-io-gcs-filesystem==0.32.0 +tensorflow-io-gcs-filesystem==0.34.0 # via tensorflow termcolor==2.3.0 # via tensorflow @@ -505,7 +499,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image toml==0.10.2 # via prefect @@ -514,9 +508,9 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via distributed -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # moviepy @@ -532,10 +526,9 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # prefect # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -544,7 +537,7 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # tensorboard @@ -556,7 +549,7 @@ wtforms[email]==3.0.1 # flask-wtf zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/docker/requirements/linux-aarch64-py3.9-mlflow-tracking-requirements.txt b/docker/requirements/linux-arm64-py3.9-mlflow-tracking-requirements.txt similarity index 84% rename from docker/requirements/linux-aarch64-py3.9-mlflow-tracking-requirements.txt rename to docker/requirements/linux-arm64-py3.9-mlflow-tracking-requirements.txt index 7eb0bc08c..78b5e819c 100644 --- a/docker/requirements/linux-aarch64-py3.9-mlflow-tracking-requirements.txt +++ b/docker/requirements/linux-arm64-py3.9-mlflow-tracking-requirements.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --config=pyproject.toml 
--output-file=venvs/linux-aarch64-py3.9-mlflow-tracking-requirements.txt docker/pip-tools/mlflow-tracking-requirements.in +# pip-compile --output-file=venvs/linux-arm64-py3.9-mlflow-tracking-requirements.txt docker/pip-tools/mlflow-tracking-requirements.in # -alembic==1.11.1 +alembic==1.12.0 # via mlflow blinker==1.6.2 # via flask -boto3==1.28.16 +boto3==1.28.57 # via -r docker/pip-tools/mlflow-tracking-requirements.in -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -18,26 +18,26 @@ certifi==2023.7.22 # via requests charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # databricks-cli # flask # mlflow cloudpickle==2.2.1 # via mlflow -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow docker==6.1.3 # via mlflow entrypoints==0.4 # via mlflow -flask==2.3.2 +flask==2.3.3 # via # mlflow # prometheus-flask-exporter gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -66,7 +66,7 @@ markupsafe==2.1.3 # werkzeug mlflow==1.30.1 # via -r docker/pip-tools/mlflow-tracking-requirements.in -numpy==1.25.2 +numpy==1.26.0 # via # mlflow # pandas @@ -83,9 +83,9 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/mlflow-tracking-requirements.in pyjwt==2.8.0 # via databricks-cli @@ -108,9 +108,9 @@ requests==2.31.0 # databricks-cli # docker # mlflow -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 -scipy==1.11.1 +scipy==1.11.3 # via mlflow simplejson==3.19.1 # via -r docker/pip-tools/mlflow-tracking-requirements.in @@ -119,7 +119,7 @@ six==1.16.0 # databricks-cli # python-dateutil # querystring-parser -smmap==5.0.0 +smmap==5.0.1 # via gitdb sqlalchemy==1.4.49 # via @@ -129,7 +129,7 @@ sqlparse==0.4.4 # via mlflow tabulate==0.9.0 # via databricks-cli -typing-extensions==4.7.1 
+typing-extensions==4.8.0 # via alembic urllib3==1.26.16 # via @@ -137,11 +137,11 @@ urllib3==1.26.16 # databricks-cli # docker # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker -werkzeug==2.3.6 +werkzeug==2.3.7 # via flask -zipp==3.16.2 +zipp==3.17.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/docker/requirements/linux-aarch64-py3.9-pytorch-cpu-requirements.txt b/docker/requirements/linux-arm64-py3.9-pytorch-cpu-requirements.txt similarity index 88% rename from docker/requirements/linux-aarch64-py3.9-pytorch-cpu-requirements.txt rename to docker/requirements/linux-arm64-py3.9-pytorch-cpu-requirements.txt index f804b66c5..89033abcf 100644 --- a/docker/requirements/linux-aarch64-py3.9-pytorch-cpu-requirements.txt +++ b/docker/requirements/linux-arm64-py3.9-pytorch-cpu-requirements.txt @@ -2,27 +2,27 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-aarch64-py3.9-pytorch-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-pytorch.in +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-arm64-py3.9-pytorch-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-pytorch.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -absl-py==1.4.0 +absl-py==2.0.0 # via tensorboard -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via fsspec aiosignal==1.3.1 # via aiohttp -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate # mlflow aniso8601==9.0.1 # via flask-restx -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -31,9 +31,9 @@ attrs==23.1.0 # 
aiohttp # jsonschema # referencing -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -43,13 +43,13 @@ cachetools==5.3.1 # via google-auth certifi==2023.7.22 # via requests -cffi==1.15.1 +cffi==1.16.0 # via cryptography charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # dask # databricks-cli @@ -68,7 +68,7 @@ cloudpickle==2.2.1 # prefect cma==3.3.0 # via -r docker/pip-tools/worker-requirements.in -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib croniter==1.4.1 # via prefect @@ -76,17 +76,17 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow decorator==4.4.2 # via moviepy -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -102,7 +102,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -113,9 +112,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -127,21 +124,21 @@ flask-sqlalchemy==2.5.1 # flask-migrate flask-wtf==1.1.1 # via dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -149,7 +146,7 @@ google-auth-oauthlib==1.0.0 # via tensorboard greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 
+grpcio==1.58.0 # via tensorboard gunicorn==20.1.0 # via mlflow @@ -162,12 +159,12 @@ idna==3.4 # email-validator # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # moviepy # scikit-image -imageio-ffmpeg==0.4.8 +imageio-ffmpeg==0.4.9 # via moviepy imgaug==0.4.0 # via dioptra (pyproject.toml) @@ -178,14 +175,12 @@ importlib-metadata==5.2.0 # gym # markdown # mlflow -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) itsdangerous==2.1.2 # via # flask @@ -198,15 +193,15 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx jsonschema-specifications==2023.7.1 # via jsonschema -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -239,7 +234,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -251,7 +246,7 @@ mlflow==1.30.1 # via dioptra (pyproject.toml) moviepy==1.0.3 # via gym -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -259,13 +254,13 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) mypy-extensions==1.0.0 # via prefect networkx==3.1 # via scikit-image -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # cma @@ -295,7 +290,7 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via # -r docker/pip-tools/worker-requirements.in # gym @@ -312,18 +307,17 @@ packaging==21.3 # prefect # pytorch-lightning # scikit-image - # torchmetrics pandas==1.5.3 # via # dioptra (pyproject.toml) # mlflow -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via dioptra (pyproject.toml) 
pendulum==2.1.2 # via prefect -pillow==10.0.0 +pillow==10.0.1 # via # -r docker/pip-tools/worker-requirements.in # imageio @@ -339,15 +333,15 @@ prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard psutil==5.9.5 # via distributed -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/worker-requirements.in -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -355,21 +349,21 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via -r docker/pip-tools/worker-requirements.in pycparser==2.21 # via cffi pygame==2.1.0 # via gym -pygments==2.15.1 +pygments==2.16.1 # via rich pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # packaging -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -406,11 +400,11 @@ pyyaml==6.0.1 # pytorch-lightning querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -427,9 +421,9 @@ requests==2.31.0 # tensorboard requests-oauthlib==1.3.1 # via google-auth-oauthlib -rich==13.5.2 +rich==13.5.3 # via -r docker/pip-tools/worker-requirements.in -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -437,7 +431,7 @@ rq==1.15.1 # via dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -445,7 +439,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) @@ -461,11 +455,11 @@ six==1.16.0 # via # adversarial-robustness-toolbox # databricks-cli - # google-auth # imgaug # python-dateutil # 
querystring-parser -smmap==5.0.0 + # tensorboard +smmap==5.0.1 # via gitdb sortedcontainers==2.4.0 # via distributed @@ -487,7 +481,7 @@ tabulate==0.9.0 # prefect tblib==2.0.0 # via distributed -tensorboard==2.13.0 +tensorboard==2.14.1 # via -r docker/pip-tools/worker-requirements.in tensorboard-data-server==0.7.1 # via tensorboard @@ -495,7 +489,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image toml==0.10.2 # via prefect @@ -513,19 +507,19 @@ torch==1.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_pl # torchvision torchaudio==0.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via distributed -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # moviepy # proglog # pytorch-lightning -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # dioptra (pyproject.toml) @@ -538,10 +532,9 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # prefect # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -550,8 +543,6 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 - # via tensorboard wtforms[email]==3.0.1 # via # dioptra (pyproject.toml) @@ -560,7 +551,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/docker/requirements/linux-aarch64-py3.9-restapi-requirements.txt b/docker/requirements/linux-arm64-py3.9-restapi-requirements.txt similarity index 87% 
rename from docker/requirements/linux-aarch64-py3.9-restapi-requirements.txt rename to docker/requirements/linux-arm64-py3.9-restapi-requirements.txt index 55f37ac41..dc48614bf 100644 --- a/docker/requirements/linux-aarch64-py3.9-restapi-requirements.txt +++ b/docker/requirements/linux-arm64-py3.9-restapi-requirements.txt @@ -2,24 +2,24 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --output-file=venvs/linux-aarch64-py3.9-restapi-requirements.txt docker/pip-tools/restapi-requirements.in pyproject.toml +# pip-compile --output-file=venvs/linux-arm64-py3.9-restapi-requirements.txt docker/pip-tools/restapi-requirements.in pyproject.toml # -alembic==1.11.1 +alembic==1.12.0 # via # dioptra (pyproject.toml) # flask-migrate # mlflow aniso8601==9.0.1 # via flask-restx -async-timeout==4.0.2 +async-timeout==4.0.3 # via redis attrs==23.1.0 # via # jsonschema # referencing -boto3==1.28.16 +boto3==1.28.57 # via dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer @@ -27,7 +27,7 @@ certifi==2023.7.22 # via requests charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # databricks-cli # dioptra (pyproject.toml) @@ -36,9 +36,9 @@ click==8.1.6 # rq cloudpickle==2.2.1 # via mlflow -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via mlflow @@ -52,7 +52,6 @@ flask==2.1.3 # via # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -63,9 +62,7 @@ flask-accepts==0.18.4 # via dioptra (pyproject.toml) flask-cors==4.0.0 # via dioptra (pyproject.toml) -flask-injector==0.14.0 - # via dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via dioptra (pyproject.toml) flask-restx==1.1.0 # via @@ -79,7 +76,7 @@ flask-wtf==1.1.1 # via dioptra (pyproject.toml) gitdb==4.0.10 # via gitpython -gitpython==3.1.32 
+gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -96,9 +93,7 @@ importlib-metadata==5.2.0 # flask # mlflow injector==0.21.0 - # via - # dioptra (pyproject.toml) - # flask-injector + # via dioptra (pyproject.toml) itsdangerous==2.1.2 # via # flask @@ -109,7 +104,7 @@ jmespath==1.0.1 # via # boto3 # botocore -jsonschema==4.18.4 +jsonschema==4.19.1 # via # dioptra (pyproject.toml) # flask-restx @@ -128,9 +123,9 @@ marshmallow==3.20.1 # flask-accepts mlflow==1.30.1 # via dioptra (pyproject.toml) -multimethod==1.9.1 +multimethod==1.10 # via dioptra (pyproject.toml) -numpy==1.25.2 +numpy==1.26.0 # via # dioptra (pyproject.toml) # mlflow @@ -149,15 +144,15 @@ pandas==1.5.3 # mlflow passlib==1.7.4 # via dioptra (pyproject.toml) -pillow==10.0.0 +pillow==10.0.1 # via -r docker/pip-tools/restapi-requirements.in prometheus-client==0.17.1 # via prometheus-flask-exporter prometheus-flask-exporter==0.22.4 # via mlflow -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow -psycopg2-binary==2.9.6 +psycopg2-binary==2.9.8 # via -r docker/pip-tools/restapi-requirements.in pyjwt==2.8.0 # via databricks-cli @@ -181,11 +176,11 @@ pyyaml==6.0.1 # mlflow querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -195,15 +190,15 @@ requests==2.31.0 # dioptra (pyproject.toml) # docker # mlflow -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing rq==1.15.1 # via dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 -scipy==1.11.1 +scipy==1.11.3 # via # dioptra (pyproject.toml) # mlflow @@ -214,7 +209,7 @@ six==1.16.0 # databricks-cli # python-dateutil # querystring-parser -smmap==5.0.0 +smmap==5.0.1 # via gitdb sqlalchemy==1.4.49 # via @@ -228,7 +223,7 @@ structlog==23.1.0 # via dioptra (pyproject.toml) tabulate==0.9.0 # via databricks-cli -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # 
dioptra (pyproject.toml) @@ -238,7 +233,7 @@ urllib3==1.26.16 # databricks-cli # docker # requests -websocket-client==1.6.1 +websocket-client==1.6.3 # via docker werkzeug==2.1.2 # via @@ -250,7 +245,7 @@ wtforms[email]==3.0.1 # via # dioptra (pyproject.toml) # flask-wtf -zipp==3.16.2 +zipp==3.17.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/docker/requirements/linux-arm64-py3.9-tensorflow2-cpu-requirements.txt b/docker/requirements/linux-arm64-py3.9-tensorflow2-cpu-requirements.txt new file mode 100644 index 000000000..37eec4456 --- /dev/null +++ b/docker/requirements/linux-arm64-py3.9-tensorflow2-cpu-requirements.txt @@ -0,0 +1,558 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --extra=sdk --extra=taskplugins --output-file=venvs/linux-arm64-py3.9-tensorflow2-cpu-requirements.txt docker/pip-tools/worker-requirements.in pyproject.toml requirements-dev-tensorflow.in +# +absl-py==2.0.0 + # via + # tensorboard + # tensorflow-cpu-aws +adversarial-robustness-toolbox==1.16.0 + # via dioptra (pyproject.toml) +alembic==1.12.0 + # via + # dioptra (pyproject.toml) + # flask-migrate + # mlflow +aniso8601==9.0.1 + # via flask-restx +astunparse==1.6.3 + # via tensorflow-cpu-aws +async-timeout==4.0.3 + # via redis +attrs==23.1.0 + # via + # jsonschema + # referencing +boto3==1.28.57 + # via dioptra (pyproject.toml) +botocore==1.31.57 + # via + # boto3 + # s3transfer +box2d-py==2.3.5 + # via gym +cachetools==5.3.1 + # via google-auth +certifi==2023.7.22 + # via requests +cffi==1.16.0 + # via cryptography +charset-normalizer==3.2.0 + # via requests +click==8.1.7 + # via + # dask + # databricks-cli + # dioptra (pyproject.toml) + # distributed + # flask + # mlflow + # prefect + # rq +cloudpickle==2.2.1 + # via + # dask + # distributed + # gym + # mlflow + # prefect +cma==3.3.0 + # via -r docker/pip-tools/worker-requirements.in +contourpy==1.1.1 + # 
via matplotlib +croniter==1.4.1 + # via prefect +cryptography==3.4.8 + # via dioptra (pyproject.toml) +cycler==0.11.0 + # via matplotlib +dask==2023.9.2 + # via + # distributed + # prefect +databricks-cli==0.17.8 + # via mlflow +decorator==4.4.2 + # via moviepy +distributed==2023.9.2 + # via prefect +dnspython==2.4.2 + # via email-validator +docker==6.1.3 + # via + # mlflow + # prefect +email-validator==2.0.0.post2 + # via wtforms +entrypoints==0.4 + # via + # dioptra (pyproject.toml) + # mlflow +flask==2.1.3 + # via + # dioptra (pyproject.toml) + # flask-cors + # flask-migrate + # flask-restx + # flask-sqlalchemy + # flask-wtf + # mlflow + # prometheus-flask-exporter +flask-accepts==0.18.4 + # via dioptra (pyproject.toml) +flask-cors==4.0.0 + # via dioptra (pyproject.toml) +flask-migrate==4.0.5 + # via dioptra (pyproject.toml) +flask-restx==1.1.0 + # via + # dioptra (pyproject.toml) + # flask-accepts +flask-sqlalchemy==2.5.1 + # via + # dioptra (pyproject.toml) + # flask-migrate +flask-wtf==1.1.1 + # via dioptra (pyproject.toml) +flatbuffers==23.5.26 + # via tensorflow-cpu-aws +fonttools==4.42.1 + # via matplotlib +fsspec==2023.9.2 + # via dask +gast==0.4.0 + # via tensorflow-cpu-aws +gitdb==4.0.10 + # via gitpython +gitpython==3.1.37 + # via mlflow +google-auth==2.23.2 + # via + # google-auth-oauthlib + # tensorboard +google-auth-oauthlib==1.0.0 + # via tensorboard +google-pasta==0.2.0 + # via tensorflow-cpu-aws +greenlet==2.0.2 + # via sqlalchemy +grpcio==1.58.0 + # via + # tensorboard + # tensorflow-cpu-aws +gunicorn==20.1.0 + # via mlflow +gym[box2d,classic_control,other,toy_text]==0.26.2 + # via -r docker/pip-tools/worker-requirements.in +gym-notices==0.0.8 + # via gym +h5py==3.9.0 + # via tensorflow-cpu-aws +idna==3.4 + # via + # email-validator + # requests +imageio==2.31.4 + # via + # imgaug + # moviepy + # scikit-image +imageio-ffmpeg==0.4.9 + # via moviepy +imgaug==0.4.0 + # via dioptra (pyproject.toml) +importlib-metadata==5.2.0 + # via + # dask + # 
flask + # gym + # jax + # markdown + # mlflow +importlib-resources==6.1.0 + # via + # matplotlib + # prefect +injector==0.21.0 + # via dioptra (pyproject.toml) +itsdangerous==2.1.2 + # via + # flask + # flask-wtf +jax==0.4.16 + # via tensorflow-cpu-aws +jinja2==3.1.2 + # via + # distributed + # flask +jmespath==1.0.1 + # via + # boto3 + # botocore +joblib==1.3.2 + # via scikit-learn +jsonschema==4.19.1 + # via + # dioptra (pyproject.toml) + # flask-restx +jsonschema-specifications==2023.7.1 + # via jsonschema +keras==2.12.0 + # via tensorflow-cpu-aws +kiwisolver==1.4.5 + # via matplotlib +lazy-loader==0.3 + # via scikit-image +libclang==16.0.6 + # via tensorflow-cpu-aws +locket==1.0.0 + # via + # distributed + # partd +lz4==4.3.2 + # via gym +mako==1.2.4 + # via alembic +markdown==3.4.4 + # via tensorboard +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.3 + # via + # jinja2 + # mako + # wtforms +marshmallow==3.20.1 + # via + # dioptra (pyproject.toml) + # flask-accepts + # marshmallow-oneofschema + # prefect +marshmallow-oneofschema==3.0.1 + # via prefect +matplotlib==3.8.0 + # via + # -r docker/pip-tools/worker-requirements.in + # gym + # imgaug + # pycocotools +mdurl==0.1.2 + # via markdown-it-py +ml-dtypes==0.3.1 + # via jax +mlflow==1.30.1 + # via dioptra (pyproject.toml) +moviepy==1.0.3 + # via gym +msgpack==1.0.7 + # via + # distributed + # prefect +multimethod==1.10 + # via dioptra (pyproject.toml) +mypy-extensions==1.0.0 + # via prefect +networkx==3.1 + # via scikit-image +numpy==1.24.3 + # via + # adversarial-robustness-toolbox + # cma + # contourpy + # dioptra (pyproject.toml) + # gym + # h5py + # imageio + # imgaug + # jax + # matplotlib + # ml-dtypes + # mlflow + # moviepy + # opencv-python + # opt-einsum + # pandas + # pyarrow + # pycocotools + # pywavelets + # scikit-image + # scikit-learn + # scipy + # shapely + # tensorboard + # tensorflow-cpu-aws + # tifffile +oauthlib==3.2.2 + # via + # databricks-cli + # requests-oauthlib 
+opencv-python==4.8.1.78 + # via + # -r docker/pip-tools/worker-requirements.in + # gym + # imgaug +opt-einsum==3.3.0 + # via + # jax + # tensorflow-cpu-aws +packaging==21.3 + # via + # dask + # distributed + # docker + # marshmallow + # matplotlib + # mlflow + # prefect + # scikit-image + # tensorflow-cpu-aws +pandas==1.5.3 + # via + # dioptra (pyproject.toml) + # mlflow +partd==1.4.1 + # via dask +passlib==1.7.4 + # via dioptra (pyproject.toml) +pendulum==2.1.2 + # via prefect +pillow==10.0.1 + # via + # -r docker/pip-tools/worker-requirements.in + # imageio + # imgaug + # matplotlib + # scikit-image +prefect==1.4.1 + # via dioptra (pyproject.toml) +proglog==0.1.10 + # via moviepy +prometheus-client==0.17.1 + # via prometheus-flask-exporter +prometheus-flask-exporter==0.22.4 + # via mlflow +protobuf==4.24.3 + # via + # mlflow + # tensorboard + # tensorflow-cpu-aws +psutil==5.9.5 + # via distributed +psycopg2-binary==2.9.8 + # via -r docker/pip-tools/worker-requirements.in +pyarrow==13.0.0 + # via dioptra (pyproject.toml) +pyasn1==0.5.0 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 + # via google-auth +pycocotools==2.0.7 + # via -r docker/pip-tools/worker-requirements.in +pycparser==2.21 + # via cffi +pygame==2.1.0 + # via gym +pygments==2.16.1 + # via rich +pyjwt==2.8.0 + # via databricks-cli +pyparsing==3.1.1 + # via + # matplotlib + # packaging +python-box==7.1.1 + # via prefect +python-dateutil==2.8.2 + # via + # botocore + # croniter + # dioptra (pyproject.toml) + # matplotlib + # pandas + # pendulum + # prefect +python-json-logger==2.0.7 + # via -r docker/pip-tools/worker-requirements.in +python-slugify==8.0.1 + # via prefect +pytz==2022.7.1 + # via + # flask-restx + # mlflow + # pandas + # prefect +pytzdata==2020.1 + # via pendulum +pywavelets==1.4.1 + # via scikit-image +pyyaml==6.0.1 + # via + # dask + # dioptra (pyproject.toml) + # distributed + # mlflow + # prefect +querystring-parser==1.2.4 + # via mlflow +redis==5.0.1 + # via + # dioptra 
(pyproject.toml) + # rq +referencing==0.30.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 + # via + # databricks-cli + # dioptra (pyproject.toml) + # docker + # mlflow + # moviepy + # prefect + # requests-oauthlib + # tensorboard +requests-oauthlib==1.3.1 + # via google-auth-oauthlib +rich==13.5.3 + # via -r docker/pip-tools/worker-requirements.in +rpds-py==0.10.3 + # via + # jsonschema + # referencing +rq==1.15.1 + # via dioptra (pyproject.toml) +rsa==4.9 + # via google-auth +s3transfer==0.7.0 + # via boto3 +scikit-image==0.21.0 + # via imgaug +scikit-learn==1.0.2 + # via + # adversarial-robustness-toolbox + # dioptra (pyproject.toml) +scipy==1.11.3 + # via + # adversarial-robustness-toolbox + # dioptra (pyproject.toml) + # imgaug + # jax + # mlflow + # scikit-image + # scikit-learn +shapely==2.0.1 + # via imgaug +simplejson==3.19.1 + # via -r docker/pip-tools/worker-requirements.in +six==1.16.0 + # via + # adversarial-robustness-toolbox + # astunparse + # databricks-cli + # google-pasta + # imgaug + # python-dateutil + # querystring-parser + # tensorflow-cpu-aws +smmap==5.0.1 + # via gitdb +sortedcontainers==2.4.0 + # via distributed +sqlalchemy==1.4.49 + # via + # alembic + # dioptra (pyproject.toml) + # flask-sqlalchemy + # mlflow +sqlparse==0.4.4 + # via mlflow +structlog==23.1.0 + # via dioptra (pyproject.toml) +swig==4.1.1 + # via gym +tabulate==0.9.0 + # via + # databricks-cli + # prefect +tblib==2.0.0 + # via distributed +tensorboard==2.12.3 + # via + # -r docker/pip-tools/worker-requirements.in + # tensorflow-cpu-aws +tensorboard-data-server==0.7.1 + # via tensorboard +tensorflow-cpu-aws==2.12.1 ; sys_platform == "linux" and (platform_machine == "aarch64" or platform_machine == "arm64") + # via -r requirements-dev-tensorflow.in +tensorflow-estimator==2.12.0 + # via tensorflow-cpu-aws +tensorflow-io-gcs-filesystem==0.34.0 + # via tensorflow-cpu-aws +termcolor==2.3.0 + # via tensorflow-cpu-aws +text-unidecode==1.3 + # via 
python-slugify +threadpoolctl==3.2.0 + # via scikit-learn +tifffile==2023.9.26 + # via scikit-image +toml==0.10.2 + # via prefect +toolz==0.12.0 + # via + # dask + # distributed + # partd +tornado==6.3.3 + # via distributed +tqdm==4.66.1 + # via + # adversarial-robustness-toolbox + # moviepy + # proglog +typing-extensions==4.5.0 + # via + # alembic + # dioptra (pyproject.toml) + # tensorflow-cpu-aws +urllib3==1.26.16 + # via + # botocore + # databricks-cli + # distributed + # docker + # prefect + # requests +websocket-client==1.6.3 + # via docker +werkzeug==2.1.2 + # via + # dioptra (pyproject.toml) + # flask + # flask-accepts + # flask-restx + # tensorboard +wheel==0.41.2 + # via + # astunparse + # tensorboard +wrapt==1.14.1 + # via tensorflow-cpu-aws +wtforms[email]==3.0.1 + # via + # dioptra (pyproject.toml) + # flask-wtf +zict==3.0.0 + # via distributed +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/docs/source/getting-started/running-dioptra.rst b/docs/source/getting-started/running-dioptra.rst index 47de188b9..12ec6e0bc 100644 --- a/docs/source/getting-started/running-dioptra.rst +++ b/docs/source/getting-started/running-dioptra.rst @@ -483,6 +483,23 @@ The ``:ro`` at the end will mount the NFS share as read-only within the worker c - worker-etc-ssl:/etc/ssl:rw - dioptra-datasets:/datasets:ro +Assigning multiple GPUs per worker +################################## + +To assign multiple GPUs to a worker, modify the ``NVIDIA_VISIBLE_DEVICES`` environment variable that is set in the **tfgpu** and **pytorch-gpu** container blocks: + +.. code:: yaml + + environment: + NVIDIA_VISIBLE_DEVICES: 0,1 + +To allow a worker to use all available GPUs, set ``NVIDIA_VISIBLE_DEVICES`` to ``all``: + +.. code:: yaml + + environment: + NVIDIA_VISIBLE_DEVICES: all + .. 
_getting-started-running-dioptra-init-deployment: Initializing the deployment diff --git a/examples/pytorch-mnist-membership-inference/src/mi.py b/examples/pytorch-mnist-membership-inference/src/mi.py index 5666c851d..328c2df42 100644 --- a/examples/pytorch-mnist-membership-inference/src/mi.py +++ b/examples/pytorch-mnist-membership-inference/src/mi.py @@ -21,7 +21,6 @@ import click import mlflow -import numpy as np import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/task-plugins/dioptra_custom/evaluation/mlflow.py b/examples/task-plugins/dioptra_custom/evaluation/mlflow.py new file mode 100644 index 000000000..eb321b88d --- /dev/null +++ b/examples/task-plugins/dioptra_custom/evaluation/mlflow.py @@ -0,0 +1,94 @@ +# This Software (Dioptra) is being made available as a public service by the +# National Institute of Standards and Technology (NIST), an Agency of the United +# States Department of Commerce. This software was developed in part by employees of +# NIST and in part by NIST contractors. Copyright in portions of this software that +# were developed by NIST contractors has been licensed or assigned to NIST. Pursuant +# to Title 17 United States Code Section 105, works of NIST employees are not +# subject to copyright protection in the United States. However, NIST may hold +# international copyright in software created by its employees and domestic +# copyright (or licensing rights) in portions of software that were assigned or +# licensed to NIST. To the extent that NIST holds copyright in this software, it is +# being made available under the Creative Commons Attribution 4.0 International +# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts +# of the software developed or licensed by NIST. 
+# +# ACCESS THE FULL CC BY 4.0 LICENSE HERE: +# https://creativecommons.org/licenses/by/4.0/legalcode +"""A task plugin module for using the MLFlow model registry.""" + +from __future__ import annotations + +from typing import Optional + +import mlflow +import structlog +from mlflow.entities.model_registry import ModelVersion +from mlflow.tracking import MlflowClient +from structlog.stdlib import BoundLogger + +from dioptra import pyplugs + +LOGGER: BoundLogger = structlog.stdlib.get_logger() + + +@pyplugs.register +def add_model_to_registry( + name: str, model_dir: str +) -> Optional[ModelVersion]: + """Registers a trained model logged during the current run to the MLFlow registry. + + Args: + active_run: The :py:class:`mlflow.ActiveRun` object managing the current run's + state. + name: The registration name to use for the model. + model_dir: The relative artifact directory where MLFlow logged the model trained + during the current run. + + Returns: + A :py:class:`~mlflow.entities.model_registry.ModelVersion` object created by the + backend. + """ + if not name.strip(): + return None + + active_run = mlflow.active_run() + + run_id: str = active_run.info.run_id + artifact_uri: str = active_run.info.artifact_uri + source: str = f"{artifact_uri}/{model_dir}" + + registered_models = [x.name for x in MlflowClient().list_registered_models()] + + if name not in registered_models: + LOGGER.info("create registered model", name=name) + MlflowClient().create_registered_model(name=name) + + LOGGER.info("create model version", name=name, source=source, run_id=run_id) + model_version: ModelVersion = MlflowClient().create_model_version( + name=name, source=source, run_id=run_id + ) + + return model_version + + +@pyplugs.register +def get_experiment_name() -> str: + """Gets the name of the experiment for the current run. + + Args: + active_run: The :py:class:`mlflow.ActiveRun` object managing the current run's + state. + + Returns: + The name of the experiment. 
+ """ + active_run = mlflow.active_run() + + experiment_name: str = ( + MlflowClient().get_experiment(active_run.info.experiment_id).name + ) + LOGGER.info( + "Obtained experiment name of active run", experiment_name=experiment_name + ) + + return experiment_name diff --git a/examples/tensorflow-adversarial-patches/src/deploy_patch.py b/examples/tensorflow-adversarial-patches/src/deploy_patch.py index adc74e654..b98febab8 100644 --- a/examples/tensorflow-adversarial-patches/src/deploy_patch.py +++ b/examples/tensorflow-adversarial-patches/src/deploy_patch.py @@ -15,7 +15,6 @@ # # ACCESS THE FULL CC BY 4.0 LICENSE HERE: # https://creativecommons.org/licenses/by/4.0/legalcode - import os from pathlib import Path from typing import Dict, List @@ -23,6 +22,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/gaussian_augmentation.py b/examples/tensorflow-adversarial-patches/src/gaussian_augmentation.py index 441d156e9..c10710c68 100644 --- a/examples/tensorflow-adversarial-patches/src/gaussian_augmentation.py +++ b/examples/tensorflow-adversarial-patches/src/gaussian_augmentation.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/generate_patch.py b/examples/tensorflow-adversarial-patches/src/generate_patch.py index f678d1786..88ed4ba3c 100644 --- a/examples/tensorflow-adversarial-patches/src/generate_patch.py +++ b/examples/tensorflow-adversarial-patches/src/generate_patch.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from 
prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/infer.py b/examples/tensorflow-adversarial-patches/src/infer.py index 824480a56..4e763aaa5 100644 --- a/examples/tensorflow-adversarial-patches/src/infer.py +++ b/examples/tensorflow-adversarial-patches/src/infer.py @@ -22,6 +22,7 @@ import click import mlflow import mlflow.tensorflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/init_model.py b/examples/tensorflow-adversarial-patches/src/init_model.py index e98b4f58e..fcc01a8e1 100644 --- a/examples/tensorflow-adversarial-patches/src/init_model.py +++ b/examples/tensorflow-adversarial-patches/src/init_model.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/jpeg_compression.py b/examples/tensorflow-adversarial-patches/src/jpeg_compression.py index 21db45781..a0f0ae4c7 100644 --- a/examples/tensorflow-adversarial-patches/src/jpeg_compression.py +++ b/examples/tensorflow-adversarial-patches/src/jpeg_compression.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/spatial_smoothing.py b/examples/tensorflow-adversarial-patches/src/spatial_smoothing.py index 4e891ea77..f81f11c13 100644 --- a/examples/tensorflow-adversarial-patches/src/spatial_smoothing.py +++ b/examples/tensorflow-adversarial-patches/src/spatial_smoothing.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: 
F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/train.py b/examples/tensorflow-adversarial-patches/src/train.py index f60260a4d..21493866d 100644 --- a/examples/tensorflow-adversarial-patches/src/train.py +++ b/examples/tensorflow-adversarial-patches/src/train.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/train_on_Fruits360_patched.py b/examples/tensorflow-adversarial-patches/src/train_on_Fruits360_patched.py index baaca5b3e..a5b95c43a 100644 --- a/examples/tensorflow-adversarial-patches/src/train_on_Fruits360_patched.py +++ b/examples/tensorflow-adversarial-patches/src/train_on_Fruits360_patched.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-adversarial-patches/src/train_on_Mnist_patched.py b/examples/tensorflow-adversarial-patches/src/train_on_Mnist_patched.py index baaca5b3e..a5b95c43a 100644 --- a/examples/tensorflow-adversarial-patches/src/train_on_Mnist_patched.py +++ b/examples/tensorflow-adversarial-patches/src/train_on_Mnist_patched.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-backdoor-poisoning/src/gaussian_augmentation.py b/examples/tensorflow-backdoor-poisoning/src/gaussian_augmentation.py index f76245fe0..f592f0f88 100644 --- a/examples/tensorflow-backdoor-poisoning/src/gaussian_augmentation.py +++ 
b/examples/tensorflow-backdoor-poisoning/src/gaussian_augmentation.py @@ -17,15 +17,14 @@ # https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Dict, List import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-backdoor-poisoning/src/gen_poison_clean_data.py b/examples/tensorflow-backdoor-poisoning/src/gen_poison_clean_data.py index cf23e2689..5f26259b3 100644 --- a/examples/tensorflow-backdoor-poisoning/src/gen_poison_clean_data.py +++ b/examples/tensorflow-backdoor-poisoning/src/gen_poison_clean_data.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-backdoor-poisoning/src/gen_poison_data.py b/examples/tensorflow-backdoor-poisoning/src/gen_poison_data.py index 741caf7da..31aef625e 100644 --- a/examples/tensorflow-backdoor-poisoning/src/gen_poison_data.py +++ b/examples/tensorflow-backdoor-poisoning/src/gen_poison_data.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-backdoor-poisoning/src/gen_poison_model.py b/examples/tensorflow-backdoor-poisoning/src/gen_poison_model.py index 030946236..1ed483b93 100644 --- a/examples/tensorflow-backdoor-poisoning/src/gen_poison_model.py +++ b/examples/tensorflow-backdoor-poisoning/src/gen_poison_model.py @@ -17,15 +17,14 @@ # 
https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List import click import mlflow +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient -from prefect import Flow, Parameter, case +from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-backdoor-poisoning/src/infer.py b/examples/tensorflow-backdoor-poisoning/src/infer.py index f6ec22c8b..7fab62def 100644 --- a/examples/tensorflow-backdoor-poisoning/src/infer.py +++ b/examples/tensorflow-backdoor-poisoning/src/infer.py @@ -22,6 +22,7 @@ import click import mlflow import mlflow.tensorflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-backdoor-poisoning/src/init_model.py b/examples/tensorflow-backdoor-poisoning/src/init_model.py index 125a96587..83bac99de 100644 --- a/examples/tensorflow-backdoor-poisoning/src/init_model.py +++ b/examples/tensorflow-backdoor-poisoning/src/init_model.py @@ -22,11 +22,11 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger -from tensorflow.keras.applications.resnet_v2 import ResNet50V2 from dioptra import pyplugs from dioptra.sdk.utilities.contexts import plugin_dirs diff --git a/examples/tensorflow-backdoor-poisoning/src/jpeg_compression.py b/examples/tensorflow-backdoor-poisoning/src/jpeg_compression.py index 9de7fccfc..db625baf0 100644 --- a/examples/tensorflow-backdoor-poisoning/src/jpeg_compression.py +++ b/examples/tensorflow-backdoor-poisoning/src/jpeg_compression.py @@ -17,15 +17,14 @@ # 
https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Dict, List import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-backdoor-poisoning/src/spatial_smoothing.py b/examples/tensorflow-backdoor-poisoning/src/spatial_smoothing.py index b23b0cdad..d3e06ffda 100644 --- a/examples/tensorflow-backdoor-poisoning/src/spatial_smoothing.py +++ b/examples/tensorflow-backdoor-poisoning/src/spatial_smoothing.py @@ -17,15 +17,14 @@ # https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Dict, List import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-backdoor-poisoning/src/train.py b/examples/tensorflow-backdoor-poisoning/src/train.py index 944b9777e..c4b493c69 100644 --- a/examples/tensorflow-backdoor-poisoning/src/train.py +++ b/examples/tensorflow-backdoor-poisoning/src/train.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-backdoor-poisoning/src/train_on_run_dataset.py b/examples/tensorflow-backdoor-poisoning/src/train_on_run_dataset.py index 7227c70be..cd43f4f01 100644 --- a/examples/tensorflow-backdoor-poisoning/src/train_on_run_dataset.py +++ 
b/examples/tensorflow-backdoor-poisoning/src/train_on_run_dataset.py @@ -17,15 +17,14 @@ # https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List import click import mlflow +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient -from prefect import Flow, Parameter, case +from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-backdoor-poisoning/src/train_with_madry_pgd_defense.py b/examples/tensorflow-backdoor-poisoning/src/train_with_madry_pgd_defense.py index aa8388eca..375743d1c 100644 --- a/examples/tensorflow-backdoor-poisoning/src/train_with_madry_pgd_defense.py +++ b/examples/tensorflow-backdoor-poisoning/src/train_with_madry_pgd_defense.py @@ -17,15 +17,14 @@ # https://creativecommons.org/licenses/by/4.0/legalcode import os -import tarfile from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List import click import mlflow +import sklearn # noqa: F401 import structlog -from mlflow.tracking import MlflowClient -from prefect import Flow, Parameter, case +from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger from structlog.stdlib import BoundLogger diff --git a/examples/tensorflow-imagenet-resnet50/src/fgm.py b/examples/tensorflow-imagenet-resnet50/src/fgm.py index c632083f1..07f06e057 100644 --- a/examples/tensorflow-imagenet-resnet50/src/fgm.py +++ b/examples/tensorflow-imagenet-resnet50/src/fgm.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git 
a/examples/tensorflow-imagenet-resnet50/src/gaussian_augmentation.py b/examples/tensorflow-imagenet-resnet50/src/gaussian_augmentation.py index 647f39972..09865dd7b 100644 --- a/examples/tensorflow-imagenet-resnet50/src/gaussian_augmentation.py +++ b/examples/tensorflow-imagenet-resnet50/src/gaussian_augmentation.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-imagenet-resnet50/src/infer.py b/examples/tensorflow-imagenet-resnet50/src/infer.py index 3c78fd863..1ae5c5c7e 100644 --- a/examples/tensorflow-imagenet-resnet50/src/infer.py +++ b/examples/tensorflow-imagenet-resnet50/src/infer.py @@ -22,6 +22,7 @@ import click import mlflow import mlflow.tensorflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-imagenet-resnet50/src/init_model.py b/examples/tensorflow-imagenet-resnet50/src/init_model.py index 4300f1da8..a2d76eec3 100644 --- a/examples/tensorflow-imagenet-resnet50/src/init_model.py +++ b/examples/tensorflow-imagenet-resnet50/src/init_model.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-imagenet-resnet50/src/jpeg_compression.py b/examples/tensorflow-imagenet-resnet50/src/jpeg_compression.py index da72a4c32..65c7ab3b2 100644 --- a/examples/tensorflow-imagenet-resnet50/src/jpeg_compression.py +++ b/examples/tensorflow-imagenet-resnet50/src/jpeg_compression.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import 
get_logger as get_prefect_logger diff --git a/examples/tensorflow-imagenet-resnet50/src/pt.py b/examples/tensorflow-imagenet-resnet50/src/pt.py index 761e9e5e4..2fa0521be 100644 --- a/examples/tensorflow-imagenet-resnet50/src/pt.py +++ b/examples/tensorflow-imagenet-resnet50/src/pt.py @@ -22,6 +22,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-imagenet-resnet50/src/spatial_smoothing.py b/examples/tensorflow-imagenet-resnet50/src/spatial_smoothing.py index e5aab7273..6610abdba 100644 --- a/examples/tensorflow-imagenet-resnet50/src/spatial_smoothing.py +++ b/examples/tensorflow-imagenet-resnet50/src/spatial_smoothing.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-classifier-legacy/README.md b/examples/tensorflow-mnist-classifier-legacy/README.md new file mode 100644 index 000000000..742e60b62 --- /dev/null +++ b/examples/tensorflow-mnist-classifier-legacy/README.md @@ -0,0 +1,22 @@ +# Tensorflow MNIST Classifier demo (legacy) + +This example demonstrates how to run a simple experiment on the transferability of the fast gradient method (FGM) evasion attack between two neural network architectures. +The demo can be found in the Jupyter notebook file [demo.ipynb](demo.ipynb). + +## Running the example + +To prepare your environment for running this example, follow the linked instructions below: + +1. [Create and activate a Python virtual environment and install the necessary dependencies](../README.md#creating-a-virtual-environment) +2. [Download the MNIST dataset using the download_data.py script.](../README.md#downloading-datasets) +3. 
[Follow the links in these User Setup instructions](../../README.md#user-setup) to do the following: + - Build the containers + - Use the cookiecutter template to generate the scripts, configuration files, and Docker Compose files you will need to run Dioptra +4. [Edit the docker-compose.yml file to mount the data folder in the worker containers](../README.md#mounting-the-data-folder-in-the-worker-containers) +5. [Initialize and start Dioptra](https://pages.nist.gov/dioptra/getting-started/running-dioptra.html#initializing-the-deployment) +6. [Register the custom task plugins for Dioptra's examples and demos](../README.md#registering-custom-task-plugins) +7. [Register the queues for Dioptra's examples and demos](../README.md#registering-queues) +8. [Start JupyterLab and open `demo.ipynb`](../README.md#starting-jupyter-lab) + +Steps 1–4 and 6–7 only need to be run once. +**Returning users only need to repeat Steps 5 (if you stopped Dioptra using `docker compose down`) and 8 (if you stopped the `jupyter lab` process)**. diff --git a/examples/tensorflow-mnist-classifier-legacy/demo.ipynb b/examples/tensorflow-mnist-classifier-legacy/demo.ipynb new file mode 100644 index 000000000..d31eb0bf7 --- /dev/null +++ b/examples/tensorflow-mnist-classifier-legacy/demo.ipynb @@ -0,0 +1,613 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tensorflow MNIST Classifier demo (legacy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook contains an end-to-end demostration of Dioptra that can be run on any modern laptop.\n", + "Please see the [example README](README.md) for instructions on how to prepare your environment for running this example." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below we import the necessary Python modules and ensure the proper environment variables are set so that all the code blocks will work as expected," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Import packages from the Python standard library\n", + "import importlib.util\n", + "import os\n", + "import sys\n", + "import pprint\n", + "import time\n", + "import warnings\n", + "from pathlib import Path\n", + "\n", + "\n", + "def register_python_source_file(module_name: str, filepath: Path) -> None:\n", + " \"\"\"Import a source file directly.\n", + "\n", + " Args:\n", + " module_name: The module name to associate with the imported source file.\n", + " filepath: The path to the source file.\n", + "\n", + " Notes:\n", + " Adapted from the following implementation in the Python documentation:\n", + " https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n", + " \"\"\"\n", + " spec = importlib.util.spec_from_file_location(module_name, str(filepath))\n", + " module = importlib.util.module_from_spec(spec)\n", + " sys.modules[module_name] = module\n", + " spec.loader.exec_module(module)\n", + "\n", + "\n", + "# Filter out warning messages\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", + "# Experiment name\n", + "EXPERIMENT_NAME = \"mnist\"\n", + "\n", + "# Default address for accessing the RESTful API service\n", + "RESTAPI_ADDRESS = \"http://localhost:80\"\n", + "\n", + "# Set DIOPTRA_RESTAPI_URI variable if not defined, used to connect to RESTful API service\n", + "if os.getenv(\"DIOPTRA_RESTAPI_URI\") is None:\n", + " os.environ[\"DIOPTRA_RESTAPI_URI\"] = RESTAPI_ADDRESS\n", + "\n", + "# Default address for accessing the MLFlow Tracking server\n", + "MLFLOW_TRACKING_URI = 
\"http://localhost:35000\"\n", + "\n", + "# Set MLFLOW_TRACKING_URI variable, used to connect to MLFlow Tracking service\n", + "if os.getenv(\"MLFLOW_TRACKING_URI\") is None:\n", + " os.environ[\"MLFLOW_TRACKING_URI\"] = MLFLOW_TRACKING_URI\n", + "\n", + "# Path to workflows archive\n", + "WORKFLOWS_TAR_GZ = Path(\"workflows.tar.gz\")\n", + "\n", + "# Register the examples/scripts directory as a Python module\n", + "register_python_source_file(\"scripts\", Path(\"..\", \"scripts\", \"__init__.py\"))\n", + "\n", + "from scripts.client import DioptraClient\n", + "from scripts.utils import make_tar\n", + "\n", + "# Import third-party Python packages\n", + "import numpy as np\n", + "from mlflow.tracking import MlflowClient\n", + "\n", + "# Create random number generator\n", + "rng = np.random.default_rng(54399264723942495723666216079516778448)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We obtained a copy of the MNIST dataset when we ran `download_data.py` script. 
If you have not done so already, see [How to Obtain Common Datasets](https://pages.nist.gov/dioptra/getting-started/acquiring-datasets.html).\n", + "The training and testing images for the MNIST dataset are stored within the `/dioptra/data/Mnist` directory as PNG files that are organized into the following folder structure," + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Mnist\n", + " ├── testing\n", + " │ ├── 0\n", + " │ ├── 1\n", + " │ ├── 2\n", + " │ ├── 3\n", + " │ ├── 4\n", + " │ ├── 5\n", + " │ ├── 6\n", + " │ ├── 7\n", + " │ ├── 8\n", + " │ └── 9\n", + " └── training\n", + " ├── 0\n", + " ├── 1\n", + " ├── 2\n", + " ├── 3\n", + " ├── 4\n", + " ├── 5\n", + " ├── 6\n", + " ├── 7\n", + " ├── 8\n", + " └── 9" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The subfolders under `training/` and `testing/` are the classification labels for the images in the dataset.\n", + "This folder structure is a standardized way to encode the label information and many libraries can make use of it, including the Tensorflow library that we are using for this particular demo." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Submit and run jobs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The entrypoints that we will be running in this example are implemented in the Python source files under `src/` and the `src/MLproject` file.\n", + "To run these entrypoints within Dioptra's architecture, we need to package those files up into an archive and submit it to the Dioptra RESTful API to create a new job.\n", + "For convenience, we provide the `make_tar` helper function defined in `examples/scripts/utils.py`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "make_tar([\"src\"], WORKFLOWS_TAR_GZ)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To connect with the endpoint, we will use a client class defined in the `examples/scripts/client.py` file that is able to connect with the Dioptra RESTful API using the HTTP protocol.\n", + "We connect using the client below.\n", + "The client uses the environment variable `DIOPTRA_RESTAPI_URI`, which we configured at the top of the notebook, to figure out how to connect to the Dioptra RESTful API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "restapi_client = DioptraClient()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We need to register an experiment under which to collect our job runs.\n", + "The code below checks if the relevant experiment named `\"mnist\"` exists.\n", + "If it does, then it just returns info about the experiment, if it doesn't, it then registers the new experiment." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_experiment = restapi_client.get_experiment_by_name(name=EXPERIMENT_NAME)\n", + "\n", + "if response_experiment is None or \"Not Found\" in response_experiment.get(\"message\", []):\n", + " response_experiment = restapi_client.register_experiment(name=EXPERIMENT_NAME)\n", + "\n", + "response_experiment" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to train our model.\n", + "Depending on the specs of your computer, training either the shallow net model or the LeNet-5 model on a CPU can take 10-20 minutes or longer to complete.\n", + "If you are fortunate enough to have access to a dedicated GPU, then the training time will be much shorter.\n", + "\n", + "So that we do not start this code by accident, we are embedding the code in a text block instead of keeping it in an executable code block.\n", + "**If you need to train one of the models, create a new code block and copy and paste the code into it.**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```python\n", + "# Submit training job for the shallow network architecture\n", + "response_shallow_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=shallow_net\",\n", + " \"-P epochs=30\",\n", + " \"-P register_model_name=mnist_shallow_net\",\n", + " ]),\n", + ")\n", + "\n", + "print(\"Training job for shallow neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_shallow_train)\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```python\n", + "# Submit training job for the LeNet-5 network architecture\n", + "response_le_net_train = restapi_client.submit_job(\n", + " 
workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=le_net\",\n", + " \"-P epochs=30\",\n", + " \"-P register_model_name=mnist_le_net\",\n", + " ]),\n", + ")\n", + "\n", + "print(\"Training job for LeNet-5 neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_le_net_train)\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have two trained models (the shallow network and the LeNet-5 network), next we will apply the fast-gradient method (FGM) evasion attack on the shallow network to generate adversarial images.\n", + "Then, after we have the adversarial images, we will use them to evaluate some standard machine learning metrics against both models.\n", + "This will give us a sense of the transferability of the attacks between models.\n", + "\n", + "This specific workflow is an example of jobs that contain dependencies, as the metric evaluation jobs cannot start until the adversarial image generation jobs have completed.\n", + "Dioptra allows users to declare one-to-many job dependencies like this, which we will use to queue up jobs to start immediately after the previous jobs have concluded.\n", + "The code below illustrates this by doing the following:\n", + "\n", + "1. A job is submitted that generates adversarial images based on the shallow net architecture (entry point **fgm**).\n", + "1. We wait until the job starts and a MLFlow identifier is assigned, which we check by polling the API until we see the id appear.\n", + "1. Once we have an id returned to us from the API, we queue up the metrics evaluation jobs and declare the job dependency using the `depends_on` option.\n", + "1. The message \"Dependent jobs submitted\" will display once everything is queued up." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def mlflow_run_id_is_not_known(response_fgm):\n", + " return response_fgm[\"mlflowRunId\"] is None and response_fgm[\"status\"] not in [\n", + " \"failed\",\n", + " \"finished\",\n", + " ]\n", + "\n", + "response_fgm_shallow_net = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"fgm\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\"-P model_name=mnist_shallow_net\", \"-P model_version=1\"]\n", + " ),\n", + ")\n", + "\n", + "print(\"FGM attack (shallow net architecture) job submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_fgm_shallow_net)\n", + "print(\"\")\n", + "\n", + "while mlflow_run_id_is_not_known(response_fgm_shallow_net):\n", + " time.sleep(1)\n", + " response_fgm_shallow_net = restapi_client.get_job_by_id(\n", + " response_fgm_shallow_net[\"jobId\"]\n", + " )\n", + "\n", + "response_shallow_net_infer_shallow_net = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"infer\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P run_id={response_fgm_shallow_net['mlflowRunId']}\",\n", + " \"-P model_name=mnist_shallow_net\",\n", + " \"-P model_version=1\",\n", + " ]\n", + " ),\n", + " depends_on=response_fgm_shallow_net[\"jobId\"],\n", + ")\n", + "\n", + "response_le_net_infer_shallow_net = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"infer\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P run_id={response_fgm_shallow_net['mlflowRunId']}\",\n", + " \"-P model_name=mnist_le_net\",\n", + " \"-P model_version=1\",\n", + " ]\n", + " ),\n", + " depends_on=response_fgm_shallow_net[\"jobId\"],\n", + ")\n", + "\n", + "print(\"Dependent jobs submitted\")" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "We can poll the status of the dependent jobs using the code below.\n", + "We should see the status of the jobs shift from \"queued\" to \"started\" and eventually become \"finished\"." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response_shallow_net_infer_shallow_net = restapi_client.get_job_by_id(\n", + " response_shallow_net_infer_shallow_net[\"jobId\"]\n", + ")\n", + "response_le_net_infer_shallow_net = restapi_client.get_job_by_id(\n", + " response_le_net_infer_shallow_net[\"jobId\"]\n", + ")\n", + "\n", + "pprint.pprint(response_shallow_net_infer_shallow_net)\n", + "print(\"\")\n", + "pprint.pprint(response_le_net_infer_shallow_net)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can similiarly run an FGM-based evasion attack using the LeNet-5 architecture as our starting point.\n", + "The following code is very similar to the code we just saw, all we've done is swap out the entry point keyword argument that requests the shallow net architecture with a version that requests the LeNet-5 architecture." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response_fgm_le_net = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"fgm\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\"-P model_name=mnist_le_net\", \"-P model_version=1\"]\n", + " ),\n", + ")\n", + "\n", + "print(\"FGM attack (LeNet-5 architecture) job submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_fgm_le_net)\n", + "print(\"\")\n", + "\n", + "while mlflow_run_id_is_not_known(response_fgm_le_net):\n", + " time.sleep(1)\n", + " response_fgm_le_net = restapi_client.get_job_by_id(response_fgm_le_net[\"jobId\"])\n", + "\n", + "response_shallow_net_infer_le_net_fgm = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"infer\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P run_id={response_fgm_le_net['mlflowRunId']}\",\n", + " \"-P model_name=mnist_shallow_net\",\n", + " \"-P model_version=1\",\n", + " ]\n", + " ),\n", + " depends_on=response_fgm_le_net[\"jobId\"],\n", + ")\n", + "\n", + "response_le_net_infer_le_net_fgm = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"infer\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P run_id={response_fgm_le_net['mlflowRunId']}\",\n", + " \"-P model_name=mnist_le_net\",\n", + " \"-P model_version=1\",\n", + " ]\n", + " ),\n", + " depends_on=response_fgm_le_net[\"jobId\"],\n", + ")\n", + "\n", + "print(\"Dependent jobs submitted\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Like before, we can monitor the status of the dependent jobs by querying the API using the code block below." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response_shallow_net_infer_le_net_fgm = restapi_client.get_job_by_id(\n", + " response_shallow_net_infer_le_net_fgm[\"jobId\"]\n", + ")\n", + "response_le_net_infer_le_net_fgm = restapi_client.get_job_by_id(\n", + " response_le_net_infer_le_net_fgm[\"jobId\"]\n", + ")\n", + "\n", + "pprint.pprint(response_shallow_net_infer_le_net_fgm)\n", + "print(\"\")\n", + "pprint.pprint(response_le_net_infer_le_net_fgm)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Congratulations, you've just run your first experiment using Dioptra!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Querying the MLFlow Tracking Service" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Currently the Dioptra API can only be used to register experiments and start jobs, so if users wish to extract their results programmatically, they can use the `MlflowClient()` class from the `mlflow` or `mlflow-skinny` Python packages to connect and query their results.\n", + "Since we captured the run ids generated by MLFlow, we can easily retrieve the data logged about one of our jobs and inspect the results.\n", + "To start the client, we simply need to run," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mlflow_client = MlflowClient()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The client uses the environment variable `MLFLOW_TRACKING_URI` to figure out how to connect to the MLFlow Tracking Service, which we configured near the top of this notebook.\n", + "To query the results of one of our runs, we just need to pass the run id to the client's `get_run()` method.\n", + "As an example, let's query the run results for the FGM attack applied to the LeNet-5 architecture," + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fgm_run_le_net = mlflow_client.get_run(response_fgm_le_net[\"mlflowRunId\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If the request completed successfully, we should now be able to query data collected during the run.\n", + "For example, to review the collected metrics, we just use," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pprint.pprint(fgm_run_le_net.data.metrics)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To review the run's parameters, we use," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pprint.pprint(fgm_run_le_net.data.params)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To review the run's tags, we use," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pprint.pprint(fgm_run_le_net.data.tags)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are many things you can query using the MLFlow client.\n", + "[The MLFlow documentation gives a full overview of the methods that are available](https://www.mlflow.org/docs/1.30.1/python_api/mlflow.client.html#mlflow.client.MlflowClient)." 
+ ] + } + ], + "metadata": { + "interpreter": { + "hash": "edee40310913f16e2ca02c1d37887bcb7f07f00399ca119bb7e27de7d632ea99" + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/tensorflow-mnist-classifier-legacy/src/MLproject b/examples/tensorflow-mnist-classifier-legacy/src/MLproject new file mode 100644 index 000000000..cb92a9df0 --- /dev/null +++ b/examples/tensorflow-mnist-classifier-legacy/src/MLproject @@ -0,0 +1,91 @@ +# This Software (Dioptra) is being made available as a public service by the +# National Institute of Standards and Technology (NIST), an Agency of the United +# States Department of Commerce. This software was developed in part by employees of +# NIST and in part by NIST contractors. Copyright in portions of this software that +# were developed by NIST contractors has been licensed or assigned to NIST. Pursuant +# to Title 17 United States Code Section 105, works of NIST employees are not +# subject to copyright protection in the United States. However, NIST may hold +# international copyright in software created by its employees and domestic +# copyright (or licensing rights) in portions of software that were assigned or +# licensed to NIST. To the extent that NIST holds copyright in this software, it is +# being made available under the Creative Commons Attribution 4.0 International +# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts +# of the software developed or licensed by NIST. 
+# +# ACCESS THE FULL CC BY 4.0 LICENSE HERE: +# https://creativecommons.org/licenses/by/4.0/legalcode +name: tensorflow-mnist-classifier + +entry_points: + fgm: + parameters: + data_dir: { type: path, default: "/dioptra/data/Mnist" } + image_size: { type: string, default: "28,28,1" } + adv_tar_name: { type: string, default: "testing_adversarial_fgm.tar.gz" } + adv_data_dir: { type: string, default: "adv_testing" } + model_name: { type: string, default: "mnist_le_net" } + model_version: { type: string, default: "1" } + batch_size: { type: float, default: 32 } + eps: { type: float, default: 0.3 } + eps_step: { type: float, default: 0.1 } + minimal: { type: float, default: 0 } + norm: { type: string, default: "inf" } + seed: { type: float, default: -1 } + command: > + python fgm.py + --data-dir {data_dir} + --image-size {image_size} + --adv-tar-name {adv_tar_name} + --adv-data-dir {adv_data_dir} + --model-name {model_name} + --model-version {model_version} + --batch-size {batch_size} + --eps {eps} + --eps-step {eps_step} + --minimal {minimal} + --norm {norm} + --seed {seed} + + infer: + parameters: + run_id: { type: string } + image_size: { type: string, default: "28,28,1" } + model_name: { type: string, default: "mnist_le_net" } + model_version: { type: string, default: "1" } + adv_tar_name: { type: string, default: "testing_adversarial_fgm.tar.gz" } + adv_data_dir: { type: string, default: "adv_testing" } + seed: { type: float, default: -1 } + command: > + python infer.py + --run-id {run_id} + --image-size {image_size} + --model-name {model_name} + --model-version {model_version} + --adv-tar-name {adv_tar_name} + --adv-data-dir {adv_data_dir} + --seed {seed} + + train: + parameters: + data_dir: { type: path, default: "/dioptra/data/Mnist" } + image_size: { type: string, default: "28,28,1" } + model_architecture: { type: string, default: "le_net" } + epochs: { type: float, default: 30 } + batch_size: { type: float, default: 32 } + register_model_name: { type: 
string, default: "" } + learning_rate: { type: float, default: 0.001 } + optimizer: { type: string, default: "Adam" } + validation_split: { type: float, default: 0.2 } + seed: { type: float, default: -1 } + command: > + python train.py + --data-dir {data_dir} + --image-size {image_size} + --model-architecture {model_architecture} + --epochs {epochs} + --batch-size {batch_size} + --register-model-name {register_model_name} + --learning-rate {learning_rate} + --optimizer {optimizer} + --validation-split {validation_split} + --seed {seed} diff --git a/examples/tensorflow-mnist-classifier/src/fgm.py b/examples/tensorflow-mnist-classifier-legacy/src/fgm.py similarity index 99% rename from examples/tensorflow-mnist-classifier/src/fgm.py rename to examples/tensorflow-mnist-classifier-legacy/src/fgm.py index 492e92117..4647134d9 100644 --- a/examples/tensorflow-mnist-classifier/src/fgm.py +++ b/examples/tensorflow-mnist-classifier-legacy/src/fgm.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-classifier/src/infer.py b/examples/tensorflow-mnist-classifier-legacy/src/infer.py similarity index 99% rename from examples/tensorflow-mnist-classifier/src/infer.py rename to examples/tensorflow-mnist-classifier-legacy/src/infer.py index 725c0bc58..af4187221 100644 --- a/examples/tensorflow-mnist-classifier/src/infer.py +++ b/examples/tensorflow-mnist-classifier-legacy/src/infer.py @@ -22,6 +22,7 @@ import click import mlflow import mlflow.tensorflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-classifier/src/train.py b/examples/tensorflow-mnist-classifier-legacy/src/train.py similarity index 99% rename from 
examples/tensorflow-mnist-classifier/src/train.py rename to examples/tensorflow-mnist-classifier-legacy/src/train.py index 944b9777e..c4b493c69 100644 --- a/examples/tensorflow-mnist-classifier/src/train.py +++ b/examples/tensorflow-mnist-classifier-legacy/src/train.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-classifier/demo.ipynb b/examples/tensorflow-mnist-classifier/demo.ipynb index 7391cb4b1..7fbfa573a 100644 --- a/examples/tensorflow-mnist-classifier/demo.ipynb +++ b/examples/tensorflow-mnist-classifier/demo.ipynb @@ -31,7 +31,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "tags": [] }, @@ -171,11 +171,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "PosixPath('/Users/jglasbrenner/Code/NIST - Direct Work/dioptra/examples/tensorflow-mnist-classifier/workflows.tar.gz')" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "make_tar([\"src\"], WORKFLOWS_TAR_GZ)" ] @@ -191,7 +202,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "tags": [] }, @@ -200,6 +211,26 @@ "restapi_client = DioptraClient()" ] }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'http://localhost:80/api/taskPlugin/'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "restapi_client.task_plugin_endpoint" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -211,11 +242,25 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": { "tags": [] }, - "outputs": [], 
+ "outputs": [ + { + "data": { + "text/plain": [ + "{'experimentId': 1,\n", + " 'createdOn': '2023-09-08T20:10:44.625973',\n", + " 'lastModified': '2023-09-08T20:10:44.625973',\n", + " 'name': 'mnist'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "response_experiment = restapi_client.get_experiment_by_name(name=EXPERIMENT_NAME)\n", "\n", @@ -260,6 +305,51 @@ "```" ] }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training job for shallow neural network submitted\n", + "\n", + "{'createdOn': '2023-09-08T20:16:01.141053',\n", + " 'dependsOn': None,\n", + " 'entryPoint': 'train',\n", + " 'entryPointKwargs': '-P model_architecture=shallow_net -P epochs=3 -P '\n", + " 'register_model_name=mnist_shallow_net',\n", + " 'experimentId': 1,\n", + " 'jobId': '4ac4c474-6752-426f-b304-1b280d26dd23',\n", + " 'lastModified': '2023-09-08T20:16:01.141053',\n", + " 'mlflowRunId': None,\n", + " 'queueId': 1,\n", + " 'status': 'queued',\n", + " 'timeout': '24h',\n", + " 'workflowUri': 's3://workflow/851b056a75214e6eacfc80fae789e234/workflows.tar.gz'}\n" + ] + } + ], + "source": [ + "# Submit training job for the shallow network architecture\n", + "response_shallow_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=shallow_net\",\n", + " \"-P epochs=3\",\n", + " \"-P register_model_name=mnist_shallow_net\",\n", + " ]),\n", + ")\n", + "\n", + "print(\"Training job for shallow neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_shallow_train)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -283,6 +373,51 @@ "```" ] }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", 
+ "output_type": "stream", + "text": [ + "Training job for LeNet-5 neural network submitted\n", + "\n", + "{'createdOn': '2023-09-08T20:30:26.581489',\n", + " 'dependsOn': None,\n", + " 'entryPoint': 'train',\n", + " 'entryPointKwargs': '-P model_architecture=le_net -P epochs=2 -P '\n", + " 'register_model_name=mnist_le_net',\n", + " 'experimentId': 1,\n", + " 'jobId': 'e760b65d-8711-4653-9e30-e49bfce752d2',\n", + " 'lastModified': '2023-09-08T20:30:26.581489',\n", + " 'mlflowRunId': None,\n", + " 'queueId': 1,\n", + " 'status': 'queued',\n", + " 'timeout': '24h',\n", + " 'workflowUri': 's3://workflow/e83c9d57f69a4740955d0a8e193863e3/workflows.tar.gz'}\n" + ] + } + ], + "source": [ + "# Submit training job for the LeNet-5 network architecture\n", + "response_le_net_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=le_net\",\n", + " \"-P epochs=2\",\n", + " \"-P register_model_name=mnist_le_net\",\n", + " ]),\n", + ")\n", + "\n", + "print(\"Training job for LeNet-5 neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_le_net_train)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -303,9 +438,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "FGM attack (shallow net architecture) job submitted\n", + "\n", + "{'createdOn': '2023-09-08T20:33:00.250653',\n", + " 'dependsOn': None,\n", + " 'entryPoint': 'fgm',\n", + " 'entryPointKwargs': '-P model_name=mnist_shallow_net -P model_version=1',\n", + " 'experimentId': 1,\n", + " 'jobId': 'fb4ff178-513d-4bf0-8ae5-a38a6d05ee22',\n", + " 'lastModified': '2023-09-08T20:33:00.250653',\n", + " 'mlflowRunId': None,\n", + " 'queueId': 1,\n", + " 'status': 'queued',\n", + " 
'timeout': '24h',\n", + " 'workflowUri': 's3://workflow/a752e16b67a844738fb14c08fe1ab0cf/workflows.tar.gz'}\n", + "\n", + "Dependent jobs submitted\n" + ] + } + ], "source": [ "def mlflow_run_id_is_not_known(response_fgm):\n", " return response_fgm[\"mlflowRunId\"] is None and response_fgm[\"status\"] not in [\n", @@ -374,9 +532,43 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'createdOn': '2023-09-08T20:33:08.492395',\n", + " 'dependsOn': 'fb4ff178-513d-4bf0-8ae5-a38a6d05ee22',\n", + " 'entryPoint': 'infer',\n", + " 'entryPointKwargs': '-P run_id=b428e6d756f24a1d870a4f1e41f9232c -P '\n", + " 'model_name=mnist_shallow_net -P model_version=1',\n", + " 'experimentId': 1,\n", + " 'jobId': '41918462-f0d1-4843-8bc5-10a957712a48',\n", + " 'lastModified': '2023-09-08T20:33:29.866172',\n", + " 'mlflowRunId': '9c969ceab8ba4e97928a27d53fc6f31f',\n", + " 'queueId': 1,\n", + " 'status': 'finished',\n", + " 'timeout': '24h',\n", + " 'workflowUri': 's3://workflow/5ef86348918a418a8f34e40db9d13aea/workflows.tar.gz'}\n", + "\n", + "{'createdOn': '2023-09-08T20:33:08.513217',\n", + " 'dependsOn': 'fb4ff178-513d-4bf0-8ae5-a38a6d05ee22',\n", + " 'entryPoint': 'infer',\n", + " 'entryPointKwargs': '-P run_id=b428e6d756f24a1d870a4f1e41f9232c -P '\n", + " 'model_name=mnist_le_net -P model_version=1',\n", + " 'experimentId': 1,\n", + " 'jobId': 'a5a23840-07ff-47d2-9465-c07c93c5e017',\n", + " 'lastModified': '2023-09-08T20:33:43.959382',\n", + " 'mlflowRunId': 'cce30073c5a74066b0c9d741e392df9b',\n", + " 'queueId': 1,\n", + " 'status': 'finished',\n", + " 'timeout': '24h',\n", + " 'workflowUri': 's3://workflow/fbdf24d0640c4fcc886b0f7ae39fb8b5/workflows.tar.gz'}\n" + ] + } + ], "source": [ "response_shallow_net_infer_shallow_net = restapi_client.get_job_by_id(\n", " response_shallow_net_infer_shallow_net[\"jobId\"]\n", @@ 
-605,7 +797,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.2" + "version": "3.9.13" } }, "nbformat": 4, diff --git a/examples/tensorflow-mnist-classifier/src/MLproject b/examples/tensorflow-mnist-classifier/src/MLproject index cb92a9df0..543195348 100644 --- a/examples/tensorflow-mnist-classifier/src/MLproject +++ b/examples/tensorflow-mnist-classifier/src/MLproject @@ -19,8 +19,8 @@ name: tensorflow-mnist-classifier entry_points: fgm: parameters: - data_dir: { type: path, default: "/dioptra/data/Mnist" } - image_size: { type: string, default: "28,28,1" } + data_dir: { type: path, default: "/dioptra/data/Mnist/testing" } + image_size: { type: string, default: "[28,28,1]" } adv_tar_name: { type: string, default: "testing_adversarial_fgm.tar.gz" } adv_data_dir: { type: string, default: "adv_testing" } model_name: { type: string, default: "mnist_le_net" } @@ -28,64 +28,63 @@ entry_points: batch_size: { type: float, default: 32 } eps: { type: float, default: 0.3 } eps_step: { type: float, default: 0.1 } - minimal: { type: float, default: 0 } + minimal: { type: string, default: "false" } norm: { type: string, default: "inf" } seed: { type: float, default: -1 } command: > - python fgm.py - --data-dir {data_dir} - --image-size {image_size} - --adv-tar-name {adv_tar_name} - --adv-data-dir {adv_data_dir} - --model-name {model_name} - --model-version {model_version} - --batch-size {batch_size} - --eps {eps} - --eps-step {eps_step} - --minimal {minimal} - --norm {norm} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment fgm.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment fgm.yml + -P data_dir={data_dir} + -P image_size={image_size} + -P adv_tar_name={adv_tar_name} + -P adv_data_dir={adv_data_dir} + -P model_name={model_name} + -P model_version={model_version} + -P batch_size={batch_size} + -P eps={eps} + -P eps_step={eps_step} + -P norm={norm} + -P seed={seed} infer: parameters: run_id: { type: 
string } - image_size: { type: string, default: "28,28,1" } + image_size: { type: string, default: "[28,28,1]" } model_name: { type: string, default: "mnist_le_net" } model_version: { type: string, default: "1" } adv_tar_name: { type: string, default: "testing_adversarial_fgm.tar.gz" } adv_data_dir: { type: string, default: "adv_testing" } seed: { type: float, default: -1 } command: > - python infer.py - --run-id {run_id} - --image-size {image_size} - --model-name {model_name} - --model-version {model_version} - --adv-tar-name {adv_tar_name} - --adv-data-dir {adv_data_dir} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment infer.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment infer.yml + -P run_id={run_id} + -P image_size={image_size} + -P model_name={model_name} + -P model_version={model_version} + -P adv_tar_name={adv_tar_name} + -P adv_data_dir={adv_data_dir} + -P seed={seed} train: parameters: - data_dir: { type: path, default: "/dioptra/data/Mnist" } - image_size: { type: string, default: "28,28,1" } + training_dir: { type: path, default: "/dioptra/data/Mnist/training" } + testing_dir: { type: path, default: "/dioptra/data/Mnist/testing" } + image_size: { type: string, default: "[28,28,1]" } model_architecture: { type: string, default: "le_net" } epochs: { type: float, default: 30 } batch_size: { type: float, default: 32 } register_model_name: { type: string, default: "" } learning_rate: { type: float, default: 0.001 } - optimizer: { type: string, default: "Adam" } validation_split: { type: float, default: 0.2 } seed: { type: float, default: -1 } command: > - python train.py - --data-dir {data_dir} - --image-size {image_size} - --model-architecture {model_architecture} - --epochs {epochs} - --batch-size {batch_size} - --register-model-name {register_model_name} - --learning-rate {learning_rate} - --optimizer {optimizer} - --validation-split {validation_split} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment train.yml 
&& PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment train.yml + -P training_dir={training_dir} + -P testing_dir={testing_dir} + -P image_size={image_size} + -P model_architecture={model_architecture} + -P epochs={epochs} + -P batch_size={batch_size} + -P register_model_name={register_model_name} + -P learning_rate={learning_rate} + -P validation_split={validation_split} + -P seed={seed} diff --git a/examples/tensorflow-mnist-classifier/src/fgm.yml b/examples/tensorflow-mnist-classifier/src/fgm.yml new file mode 100644 index 000000000..488d4f427 --- /dev/null +++ b/examples/tensorflow-mnist-classifier/src/fgm.yml @@ -0,0 +1,239 @@ +types: + rng: + path: + path_string: + union: [string, path] + path_string_null: + union: [path_string, "null"] + dirs: + list: path_string + parameters: + mapping: [string, number] + kwargs: + mapping: [string, any] + kwargs_null: + union: [kwargs, "null"] + keras_classifier: + distance_metric_request: + mapping: [string, string] + distance_metrics_requests: + list: distance_metric_request + distance_metric: + tuple: [string, any] + distance_metrics: + list: distance_metric + distance_metrics_null: + union: [distance_metrics, "null"] + dataframe: + image_size: + tuple: [integer, integer, integer] + norm: + union: [integer, number, string] + +parameters: + data_dir: /dioptra/data/Mnist/testing + image_size: [28, 28, 1] + adv_tar_name: testing_adversarial_fgm.tar.gz + adv_data_dir: adv_testing + distance_metrics_filename: distance_metrics.csv + model_name: mnist_le_net + model_version: 1 + clip_values: [0, 1] + batch_size: 32 + eps: 0.3 + eps_step: 0.1 + minimal: false + norm: np.inf + seed: -1 + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + 
required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + make_directories: + plugin: dioptra_builtins.artifacts.utils.make_directories + inputs: + - dirs: dirs + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + load_classifier: + plugin: dioptra_builtins.registry.art.load_wrapped_tensorflow_keras_classifier + inputs: + - name: name + type: string + required: true + - version: integer + - name: classifier_kwargs + type: kwargs + required: false + outputs: + classifier: keras_classifier + + distance_metrics: + plugin: dioptra_builtins.metrics.distance.get_distance_metric_list + inputs: + - request: distance_metrics_requests + outputs: + distance_metrics_list: distance_metrics + + create_dataset: + plugin: dioptra_builtins.attacks.fgm.create_adversarial_fgm_dataset + inputs: + - data_dir: string + - adv_data_dir: path_string + - keras_classifier: keras_classifier + - image_size: image_size + - name: distance_metrics_list + type: distance_metrics_null + required: false + - name: rescale + type: number + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + - name: eps + type: number + required: false + - name: eps_step + type: number + required: false + - name: minimal + type: boolean + required: false + - name: norm + type: norm + required: false + outputs: + dataset: dataframe + + upload_artifact_directory: + plugin: dioptra_builtins.artifacts.mlflow.upload_directory_as_tarball_artifact + inputs: + - source_dir: path_string + - tarball_filename: string + - name: tarball_write_mode + type: string + required: false + - name: working_dir + type: path_string_null + required: false + + upload_artifact_dataframe: + plugin: dioptra_builtins.artifacts.mlflow.upload_data_frame_artifact + inputs: + - data_frame: dataframe + - 
file_name: string + - file_format: string + - name: file_format_kwargs + type: kwargs_null + required: false + - name: working_dir + type: path_string_null + required: false + +graph: + init_rng: + init_rng: $seed + + tensorflow_global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow_results: + init_tensorflow: $tensorflow_global_seed + + make_directories_results: + make_directories: [[$adv_data_dir]] + + log_mlflow_params_result: + log_params: + - entry_point_seed: $seed + tensorflow_global_seed: $tensorflow_global_seed + dataset_seed: $dataset_seed + + keras_classifier: + load_classifier: + name: $model_name + version: $model_version + classifier_kwargs: + clip_values: $clip_values + dependencies: init_tensorflow_results + + distance_metrics: + distance_metrics: + - - name: l_infinity_norm + func: l_inf_norm + - name: l_1_norm + func: l_1_norm + - name: l_2_norm + func: l_2_norm + - name: cosine_similarity + func: paired_cosine_similarities + - name: euclidean_distance + func: paired_euclidean_distances + - name: manhattan_distance + func: paired_manhattan_distances + - name: wasserstein_distance + func: paired_wasserstein_distances + + dataset: + create_dataset: + data_dir: $data_dir + keras_classifier: $keras_classifier + distance_metrics_list: $distance_metrics + adv_data_dir: $adv_data_dir + batch_size: $batch_size + image_size: $image_size + eps: $eps + eps_step: $eps_step + minimal: $minimal + norm: $norm + dependencies: make_directories_results + + upload_directory: + upload_artifact_directory: + - $adv_data_dir + - $adv_tar_name + dependencies: dataset + + upload_dataset: + upload_artifact_dataframe: + - $dataset + - $distance_metrics_filename + - csv.gz + - index: false diff --git a/examples/tensorflow-mnist-classifier/src/infer.yml b/examples/tensorflow-mnist-classifier/src/infer.yml new file mode 100644 index 000000000..756e563c8 --- /dev/null +++ b/examples/tensorflow-mnist-classifier/src/infer.yml 
@@ -0,0 +1,200 @@ +types: + rng: + path: + sequential: + number_null: + union: [number, "null"] + string_null: + union: [string, "null"] + path_string: + union: [string, path] + path_string_null: + union: [path_string, "null"] + dirs: + list: path_string + directory_iterator: + parameters: + mapping: [string, number] + kwargs: + mapping: [string, any] + kwargs_null: + union: [kwargs, "null"] + keras_classifier: + eval_metric: + mapping: [string, any] + eval_metric_results: + mapping: [string, number] + dataframe: + image_size: + tuple: [integer, integer, integer] + norm: + union: [integer, number, string] + +parameters: + run_id: "" + image_size: [28, 28, 1] + adv_tar_name: testing_adversarial_fgm.tar.gz + adv_data_dir: adv_testing + model_name: mnist_le_net + model_version: 1 + seed: -1 + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + download_artifacts: + plugin: dioptra_builtins.artifacts.mlflow.download_all_artifacts_in_run + inputs: + - run_id: string + - artifact_path: string + - name: destination_path + type: string_null + required: false + outputs: + download_path: string + + extract_tarfile: + plugin: dioptra_builtins.artifacts.utils.extract_tarfile + inputs: + - filepath: path_string + - name: tarball_read_mode + type: string + required: false + - name: output_dir + type: any + required: false + + create_dataset: + plugin: dioptra_builtins.data.tensorflow.create_image_dataset + inputs: 
+ - data_dir: string + - subset: string_null + - image_size: image_size + - seed: integer + - name: rescale + type: number + required: false + - name: validation_split + type: number_null + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + outputs: + iterator: directory_iterator + + load_classifier: + plugin: dioptra_builtins.registry.mlflow.load_tensorflow_keras_classifier + inputs: + - name: name + type: string + required: true + - version: integer + outputs: + classifier: sequential + + evaluate_metrics: + plugin: dioptra_custom.evaluation.tensorflow.evaluate_metrics_tensorflow + inputs: + - classifier: any + - dataset: any + outputs: + metrics: eval_metric_results + log_results: + plugin: dioptra_builtins.tracking.mlflow.log_metrics + inputs: + - metrics: eval_metric_results + +graph: + init_rng: + init_rng: $seed + + tensorflow_global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow_results: + init_tensorflow: $tensorflow_global_seed + + log_mlflow_params_result: + log_params: + - entry_point_seed: $seed + tensorflow_global_seed: $tensorflow_global_seed + dataset_seed: $dataset_seed + + adv_tar_path: + download_artifacts: + run_id: $run_id + artifact_path: $adv_tar_name + + extract_tarfile_results: + extract_tarfile: + filepath: $adv_tar_path + + adv_ds: + create_dataset: + data_dir: $adv_data_dir + subset: null + validation_split: null + image_size: $image_size + seed: $dataset_seed + dependencies: + - init_tensorflow_results + - extract_tarfile_results + + classifier: + load_classifier: + name: $model_name + version: $model_version + dependencies: + - init_tensorflow_results + - adv_ds + + classifier_performance_metrics: + evaluate_metrics: + classifier: $classifier + dataset: $adv_ds + dependencies: + - classifier + + logged_metrics: + log_results: + metrics: $classifier_performance_metrics + dependencies: + - 
classifier_performance_metrics diff --git a/examples/tensorflow-mnist-classifier/src/train.yml b/examples/tensorflow-mnist-classifier/src/train.yml new file mode 100644 index 000000000..c1d3d2a3f --- /dev/null +++ b/examples/tensorflow-mnist-classifier/src/train.yml @@ -0,0 +1,308 @@ +types: + rng: + optimizer: + name_parameters: + mapping: + name: string + parameters: + mapping: [string, any] + metrics_list: + list: name_parameters + performance_metrics: + metrics: + callbacks_in: + list: name_parameters + callbacks_out: + directory_iterator: + parameters: + mapping: [string, number] + image_size: + tuple: [integer, integer, integer] + sequential: + fit_kwargs: + mapping: [string, any] + fit_kwargs_null: + union: [fit_kwargs, "null"] + str_null: + union: [string, "null"] + num_null: + union: [number, "null"] + +parameters: + seed: -1 + optimizer_name: Adam + learning_rate: 0.001 + training_dir: /dioptra/data/Mnist/training + testing_dir: /dioptra/data/Mnist/testing + image_size: [28, 28, 1] + validation_split: 0.2 + batch_size: 32 + model_architecture: le_net + epochs: 30 + register_model_name: "" + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + get_optimizer: + plugin: dioptra_custom.evaluation.tensorflow.get_optimizer + inputs: + - name: name + type: string + - learning_rate: number + outputs: + optimizer: optimizer + + get_perf_metrics: + plugin: 
dioptra_custom.evaluation.tensorflow.get_performance_metrics + inputs: + - metrics_list: metrics_list + outputs: + performance_metrics: performance_metrics + + get_callbacks: + plugin: dioptra_custom.evaluation.tensorflow.get_model_callbacks + inputs: + - callbacks_list: callbacks_in + outputs: + callbacks: callbacks_out + + create_dataset: + plugin: dioptra_builtins.data.tensorflow.create_image_dataset + inputs: + - data_dir: string + - subset: str_null + - image_size: image_size + - seed: integer + - name: rescale + type: number + required: false + - name: validation_split + type: num_null + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + outputs: + dataset: directory_iterator + + get_num_classes: + plugin: dioptra_builtins.data.tensorflow.get_n_classes_from_directory_iterator + inputs: + - ds: directory_iterator + outputs: + num_classes: integer + + init_classifier: + plugin: dioptra_builtins.estimators.keras_classifiers.init_classifier + inputs: + - model_architecture: string + - optimizer: optimizer + - metrics: performance_metrics + - input_shape: image_size + - n_classes: integer + - name: loss + type: string + required: false + outputs: + classifier: sequential + + fit_model: + plugin: dioptra_builtins.estimators.methods.fit + inputs: + - estimator: any + - x: any + - name: y + type: any + required: false + - name: fit_kwargs + type: fit_kwargs_null + required: false + + eval_metrics_tensorflow: + plugin: dioptra_custom.evaluation.tensorflow.evaluate_metrics_tensorflow + inputs: + - classifier: sequential + - dataset: directory_iterator + outputs: + metrics: metrics + + log_metrics: + plugin: dioptra_builtins.tracking.mlflow.log_metrics + inputs: + - metrics: metrics + + log_keras_estimator: + plugin: dioptra_builtins.tracking.mlflow.log_tensorflow_keras_estimator + inputs: + - estimator: sequential + - model_dir: string + + add_model_to_registry: + plugin: 
dioptra_custom.evaluation.mlflow.add_model_to_registry + inputs: + - name: name + type: string + - model_dir: string + +graph: + init_rng: + init_rng: $seed + + global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow: + init_tensorflow: $global_seed + + log_params: + log_params: + - entry_point_seed: $init_rng.seed + tensorflow_global_seed: $global_seed + dataset_seed: $dataset_seed + + optimizer: + get_optimizer: [$optimizer_name, $learning_rate] + dependencies: + - init_tensorflow + + perf_metrics: + get_perf_metrics: + - - name: CategoricalAccuracy + parameters: { name: accuracy } + - name: Precision + parameters: { name: precision } + - name: Recall + parameters: { name: recall } + - name: AUC + parameters: { name: auc } + dependencies: + - init_tensorflow + + callbacks: + get_callbacks: + - - name: EarlyStopping + parameters: + monitor: val_loss + min_delta: .01 + patience: 5 + restore_best_weights: true + dependencies: + - init_tensorflow + + training_dataset: + create_dataset: + data_dir: $training_dir + subset: training + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: + - init_tensorflow + + validation_dataset: + create_dataset: + data_dir: $training_dir + subset: validation + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: + - init_tensorflow + + testing_dataset: + create_dataset: + data_dir: $testing_dir + subset: null + image_size: $image_size + seed: $dataset_seed + validation_split: null + batch_size: $batch_size + dependencies: + - init_tensorflow + + num_classes: + get_num_classes: $training_dataset + + classifier: + init_classifier: + - $model_architecture + - $optimizer + - $perf_metrics + - $image_size + - $num_classes + dependencies: + - init_tensorflow + + model: + fit_model: + estimator: $classifier + x: $training_dataset + fit_kwargs: + 
nb_epochs: $epochs + validation_data: $validation_dataset + callbacks: $callbacks + verbose: 2 + + eval_metrics_tensorflow: + eval_metrics_tensorflow: + - $classifier + - $testing_dataset + dependencies: + - model + + log_metrics: + log_metrics: $eval_metrics_tensorflow + + log_keras_estimator: + log_keras_estimator: + - $classifier + - model + dependencies: + - model + + add_model_to_registry: + add_model_to_registry: + - $register_model_name + - model + dependencies: + - log_keras_estimator diff --git a/examples/tensorflow-mnist-feature-squeezing/src/cw_inf.py b/examples/tensorflow-mnist-feature-squeezing/src/cw_inf.py index 4adbfc4e3..45ef6fc40 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/cw_inf.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/cw_inf.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/cw_l2.py b/examples/tensorflow-mnist-feature-squeezing/src/cw_l2.py index 6b555c8eb..53698a02e 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/cw_l2.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/cw_l2.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/deepfool.py b/examples/tensorflow-mnist-feature-squeezing/src/deepfool.py index f67e10de3..8c8327541 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/deepfool.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/deepfool.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import 
get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/feature_squeeze.py b/examples/tensorflow-mnist-feature-squeezing/src/feature_squeeze.py index 7f0ffa6df..13a691421 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/feature_squeeze.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/feature_squeeze.py @@ -20,6 +20,7 @@ warnings.filterwarnings("ignore") +import sklearn # noqa: F401 import tensorflow as tf tf.compat.v1.disable_eager_execution() @@ -30,7 +31,6 @@ import click import mlflow -import numpy as np import structlog from prefect import Flow, Parameter diff --git a/examples/tensorflow-mnist-feature-squeezing/src/fgm.py b/examples/tensorflow-mnist-feature-squeezing/src/fgm.py index b23d9f575..6b7b42943 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/fgm.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/fgm.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/infer.py b/examples/tensorflow-mnist-feature-squeezing/src/infer.py index ee5e2dae0..2936b0f04 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/infer.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/infer.py @@ -22,6 +22,7 @@ import click import mlflow import mlflow.tensorflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/jsma.py b/examples/tensorflow-mnist-feature-squeezing/src/jsma.py index 2cc5bd06b..21367ea7b 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/jsma.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/jsma.py @@ -23,6 +23,7 @@ import click import mlflow import numpy as np +import sklearn 
# noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-feature-squeezing/src/train.py b/examples/tensorflow-mnist-feature-squeezing/src/train.py index d017f276e..f891bbfeb 100644 --- a/examples/tensorflow-mnist-feature-squeezing/src/train.py +++ b/examples/tensorflow-mnist-feature-squeezing/src/train.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-model-inversion-legacy/README.md b/examples/tensorflow-mnist-model-inversion-legacy/README.md new file mode 100644 index 000000000..7744e0ba4 --- /dev/null +++ b/examples/tensorflow-mnist-model-inversion-legacy/README.md @@ -0,0 +1,24 @@ +# Tensorflow MNIST Model Inversion Demo (legacy) + +>⚠️ **Warning:** The attack used in this demo is computationally expensive and will take a very long time to complete if run using the CPUs found in a typical personal computer. +> For this reason, it is highly recommended that you run these demos on a CUDA-compatible GPU. + +The demo provided in the Jupyter notebook [demo.ipynb](demo.ipynb) uses Dioptra to run experiments that investigate the model inversion attack when launched on a neural network model trained on the MNIST dataset. + +## Running the example + +To prepare your environment for running this example, follow the linked instructions below: + +1. [Create and activate a Python virtual environment and install the necessary dependencies](../README.md#creating-a-virtual-environment) +2. [Download the MNIST dataset using the download_data.py script.](../README.md#downloading-datasets) +3. 
[Follow the links in these User Setup instructions](../../README.md#user-setup) to do the following: + - Build the containers + - Use the cookiecutter template to generate the scripts, configuration files, and Docker Compose files you will need to run Dioptra +4. [Edit the docker-compose.yml file to mount the data folder in the worker containers](../README.md#mounting-the-data-folder-in-the-worker-containers) +5. [Initialize and start Dioptra](https://pages.nist.gov/dioptra/getting-started/running-dioptra.html#initializing-the-deployment) +6. [Register the custom task plugins for Dioptra's examples and demos](../README.md#registering-custom-task-plugins) +7. [Register the queues for Dioptra's examples and demos](../README.md#registering-queues) +8. [Start JupyterLab and open `demo.ipynb`](../README.md#starting-jupyter-lab) + +Steps 1–4 and 6–7 only need to be run once. +**Returning users only need to repeat Steps 5 (if you stopped Dioptra using `docker compose down`) and 8 (if you stopped the `jupyter lab` process)**. 
diff --git a/examples/tensorflow-mnist-model-inversion-legacy/demo.ipynb b/examples/tensorflow-mnist-model-inversion-legacy/demo.ipynb new file mode 100644 index 000000000..021d2717b --- /dev/null +++ b/examples/tensorflow-mnist-model-inversion-legacy/demo.ipynb @@ -0,0 +1,394 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tensorflow MNIST MIFace Model Inversion demo (legacy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + ">⚠️ **Warning:** The attack used in this demo is computationally expensive and will take a very long to complete if run using the CPUs found in a typical personal computer.\n", + "> For this reason, it is highly recommended that you run these demos on a CUDA-compatible GPU.\n", + "\n", + "This notebook contains a demonstration of using Dioptra to run experiments that investigate the model inversion attack when launched on a neural network model trained on the MNIST dataset. Please see the [example README](README.md) for instructions on how to prepare your environment for running this example." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below we import the necessary Python modules and ensure the proper environment variables are set so that all the code blocks will work as expected," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Import packages from the Python standard library\n", + "import importlib.util\n", + "import os\n", + "import pprint\n", + "import sys\n", + "import warnings\n", + "from pathlib import Path\n", + "\n", + "\n", + "def register_python_source_file(module_name: str, filepath: Path) -> None:\n", + " \"\"\"Import a source file directly.\n", + "\n", + " Args:\n", + " module_name: The module name to associate with the imported source file.\n", + " filepath: The path to the source file.\n", + "\n", + " Notes:\n", + " Adapted from the following implementation in the Python documentation:\n", + " https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n", + " \"\"\"\n", + " spec = importlib.util.spec_from_file_location(module_name, str(filepath))\n", + " module = importlib.util.module_from_spec(spec)\n", + " sys.modules[module_name] = module\n", + " spec.loader.exec_module(module)\n", + "\n", + "\n", + "# Filter out warning messages\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", + "# Experiment name\n", + "EXPERIMENT_NAME = \"mnist_model_inversion\"\n", + "\n", + "# Default address for accessing the RESTful API service\n", + "RESTAPI_ADDRESS = \"http://localhost:80\"\n", + "\n", + "# Set DIOPTRA_RESTAPI_URI variable if not defined, used to connect to RESTful API service\n", + "if os.getenv(\"DIOPTRA_RESTAPI_URI\") is None:\n", + " os.environ[\"DIOPTRA_RESTAPI_URI\"] = RESTAPI_ADDRESS\n", + "\n", + "# Default address for accessing the MLFlow Tracking server\n", + "MLFLOW_TRACKING_URI = 
\"http://localhost:35000\"\n", + "\n", + "# Set MLFLOW_TRACKING_URI variable, used to connect to MLFlow Tracking service\n", + "if os.getenv(\"MLFLOW_TRACKING_URI\") is None:\n", + " os.environ[\"MLFLOW_TRACKING_URI\"] = MLFLOW_TRACKING_URI\n", + "\n", + "# Path to workflows archive\n", + "WORKFLOWS_TAR_GZ = Path(\"workflows.tar.gz\")\n", + "\n", + "# Register the examples/scripts directory as a Python module\n", + "register_python_source_file(\"scripts\", Path(\"..\", \"scripts\", \"__init__.py\"))\n", + "\n", + "from scripts.client import DioptraClient\n", + "from scripts.utils import make_tar\n", + "\n", + "# Import third-party Python packages\n", + "import numpy as np\n", + "from mlflow.tracking import MlflowClient\n", + "\n", + "# Create random number generator\n", + "rng = np.random.default_rng(54399264723942495723666216079516778448)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We obtained a copy of the MNIST dataset when we ran `download_data.py` script. 
If you have not done so already, see [How to Obtain Common Datasets](https://pages.nist.gov/dioptra/getting-started/acquiring-datasets.html).\n", + "The training and testing images for the MNIST dataset are stored within the `/dioptra/data/Mnist` directory as PNG files that are organized into the following folder structure," + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Mnist\n", + " ├── testing\n", + " │ ├── 0\n", + " │ ├── 1\n", + " │ ├── 2\n", + " │ ├── 3\n", + " │ ├── 4\n", + " │ ├── 5\n", + " │ ├── 6\n", + " │ ├── 7\n", + " │ ├── 8\n", + " │ └── 9\n", + " └── training\n", + " ├── 0\n", + " ├── 1\n", + " ├── 2\n", + " ├── 3\n", + " ├── 4\n", + " ├── 5\n", + " ├── 6\n", + " ├── 7\n", + " ├── 8\n", + " └── 9" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The subfolders under `training/` and `testing/` are the classification labels for the images in the dataset.\n", + "This folder structure is a standardized way to encode the label information and many libraries can make use of it, including the Tensorflow library that we are using for this particular demo." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Submit and run jobs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The entrypoints that we will be running in this example are implemented in the Python source files under `src/` and the `src/MLproject` file.\n", + "To run these entrypoints within Dioptra's architecture, we need to package those files up into an archive and submit it to the Dioptra RESTful API to create a new job.\n", + "For convenience, we provide the `make_tar` helper function defined in `examples/scripts/utils.py`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "make_tar([\"src\"], WORKFLOWS_TAR_GZ)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To connect with the endpoint, we will use a client class defined in the `examples/scripts/client.py` file that is able to connect with the Dioptra RESTful API using the HTTP protocol.\n", + "We connect using the client below.\n", + "The client uses the environment variable `DIOPTRA_RESTAPI_URI`, which we configured at the top of the notebook, to figure out how to connect to the Dioptra RESTful API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "restapi_client = DioptraClient()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We need to register an experiment under which to collect our job runs.\n", + "The code below checks if the relevant experiment named `\"mnist_model_inversion\"` exists.\n", + "If it does, then it just returns info about the experiment, if it doesn't, it then registers the new experiment." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_experiment = restapi_client.get_experiment_by_name(name=EXPERIMENT_NAME)\n", + "\n", + "if response_experiment is None or \"Not Found\" in response_experiment.get(\"message\", []):\n", + " response_experiment = restapi_client.register_experiment(name=EXPERIMENT_NAME)\n", + "\n", + "response_experiment" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to train our model. Since it is recommended that we use GPUs, we will submit our job to the `\"tensorflow_gpu\"` queue. We will train one model using either the shallow net or the LeNet-5 architecture. 
As we are using a dedicated GPU for this example, the training time when using a LeNet-5 model will be much shorter than when using a CPU, allowing for the choice between the two model architectures.\n", + "\n", + "So that we do not start this code by accident, we are embedding the code in a text block instead of keeping it in an executable code block.\n", + "**If you need to train one of the models, create a new code block and copy and paste the code into it.**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```python\n", + "response_shallow_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=shallow_net\",\n", + " \"-P register_model_name=model_inversion_shallow_net\",\n", + " ]),\n", + " queue=\"tensorflow_gpu\",\n", + " timeout=\"1h\",\n", + ")\n", + "\n", + "print(\"Training job for shallow neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_shallow_train)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "```python\n", + "response_le_net_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join([\n", + " \"-P model_architecture=le_net\",\n", + " \"-P register_model_name=model_inversion_le_net\",\n", + " ]),\n", + " queue=\"tensorflow_gpu\",\n", + " timeout=\"1h\",\n", + ")\n", + "\n", + "print(\"Training job for LeNet-5 neural network submitted\")\n", + "print(\"\")\n", + "pprint.pprint(response_le_net_train)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have a trained MNIST model, we can attack it with the model inversion attack. 
Model inversion has a number of potential inputs, including:\n", + "\n", + "| parameter | data type | description |\n", + "| --- | --- | --- |\n", + "| `max_iter` | _int_ | The maximum number of iterations for gradient descent. |\n", + "| `window_length` | _int_ | The length of the window for checking whether the descent should be aborted. |\n", + "| `threshold` | _float_ | The threshold for descent stopping criterion. |\n", + "| `learning_rate` | _float_ | The learning rate for the gradient descent. |" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can attack a shallow net architecture model using the code below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_shallow_net_miface = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"mi\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P model_name=model_inversion_shallow_net\",\n", + " \"-P model_version=1\",\n", + " \"-P batch_size=512\",\n", + " \"-P max_iter=10000\",\n", + " \"-P window_length=50\",\n", + " \"-P threshold=0.99\",\n", + " \"-P learning_rate=0.01\",\n", + " ]\n", + " ),\n", + " queue=\"tensorflow_gpu\",\n", + " timeout=\"2h\",\n", + ")\n", + "\n", + "pprint.pprint(response_shallow_net_miface)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Similarly, we can attack a LeNet-5 architecture model using the code below. Note that the code is the same except the entry point keyword argument being changed to use our LeNet-5 model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response_le_net_miface = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"mi\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " f\"-P model_name=model_inversion_le_net\",\n", + " \"-P model_version=1\",\n", + " \"-P batch_size=512\",\n", + " \"-P max_iter=10000\",\n", + " \"-P window_length=50\",\n", + " \"-P threshold=0.99\",\n", + " \"-P learning_rate=0.01\",\n", + " ]\n", + " ),\n", + " queue=\"tensorflow_gpu\",\n", + " timeout=\"2h\",\n", + ")\n", + "\n", + "pprint.pprint(response_le_net_miface)" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "edee40310913f16e2ca02c1d37887bcb7f07f00399ca119bb7e27de7d632ea99" + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/tensorflow-mnist-model-inversion-legacy/src/MLproject b/examples/tensorflow-mnist-model-inversion-legacy/src/MLproject new file mode 100644 index 000000000..eeece5e05 --- /dev/null +++ b/examples/tensorflow-mnist-model-inversion-legacy/src/MLproject @@ -0,0 +1,70 @@ +# This Software (Dioptra) is being made available as a public service by the +# National Institute of Standards and Technology (NIST), an Agency of the United +# States Department of Commerce. This software was developed in part by employees of +# NIST and in part by NIST contractors. Copyright in portions of this software that +# were developed by NIST contractors has been licensed or assigned to NIST. 
Pursuant +# to Title 17 United States Code Section 105, works of NIST employees are not +# subject to copyright protection in the United States. However, NIST may hold +# international copyright in software created by its employees and domestic +# copyright (or licensing rights) in portions of software that were assigned or +# licensed to NIST. To the extent that NIST holds copyright in this software, it is +# being made available under the Creative Commons Attribution 4.0 International +# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts +# of the software developed or licensed by NIST. +# +# ACCESS THE FULL CC BY 4.0 LICENSE HERE: +# https://creativecommons.org/licenses/by/4.0/legalcode + +name: mnist-model-inversion + +entry_points: + mi: + parameters: + adv_tar_name: {type: string, default: "testing_adversarial_mi.tar.gz"} + adv_data_dir: {type: string, default: "adv_testing"} + model_name: {type: string, default: "mnist_le_net"} + model_version: {type: string, default: "1"} + batch_size: {type: float, default: 32} + classes: {type: int, default: 10} + max_iter: {type: int, default: 10000} + window_length: {type: int, default: 100} + threshold: {type: float, default: 0.99} + learning_rate: {type: float, default: 0.1} + seed: {type: float, default: -1} + command: > + python mi.py + --adv-tar-name {adv_tar_name} + --adv-data-dir {adv_data_dir} + --model-name {model_name} + --model-version {model_version} + --batch-size {batch_size} + --max-iter {max_iter} + --window-length {window_length} + --threshold {threshold} + --learning-rate {learning_rate} + --seed {seed} + + train: + parameters: + data_dir: { type: path, default: "/dioptra/data/Mnist" } + image_size: { type: string, default: "28,28,1" } + model_architecture: { type: string, default: "le_net" } + epochs: { type: float, default: 30 } + batch_size: { type: float, default: 32 } + register_model_name: { type: string, default: "" } + learning_rate: { type: float, default: 0.001 } 
+ optimizer: { type: string, default: "Adam" } + validation_split: { type: float, default: 0.2 } + seed: { type: float, default: -1 } + command: > + python train.py + --data-dir {data_dir} + --image-size {image_size} + --model-architecture {model_architecture} + --epochs {epochs} + --batch-size {batch_size} + --register-model-name {register_model_name} + --learning-rate {learning_rate} + --optimizer {optimizer} + --validation-split {validation_split} + --seed {seed} diff --git a/examples/tensorflow-mnist-model-inversion/src/mi.py b/examples/tensorflow-mnist-model-inversion-legacy/src/mi.py similarity index 99% rename from examples/tensorflow-mnist-model-inversion/src/mi.py rename to examples/tensorflow-mnist-model-inversion-legacy/src/mi.py index 37c286fc3..d884541a9 100644 --- a/examples/tensorflow-mnist-model-inversion/src/mi.py +++ b/examples/tensorflow-mnist-model-inversion-legacy/src/mi.py @@ -21,6 +21,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-model-inversion/src/train.py b/examples/tensorflow-mnist-model-inversion-legacy/src/train.py similarity index 99% rename from examples/tensorflow-mnist-model-inversion/src/train.py rename to examples/tensorflow-mnist-model-inversion-legacy/src/train.py index 3810d1ef1..ae2cda389 100644 --- a/examples/tensorflow-mnist-model-inversion/src/train.py +++ b/examples/tensorflow-mnist-model-inversion-legacy/src/train.py @@ -22,6 +22,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-model-inversion/src/MLproject b/examples/tensorflow-mnist-model-inversion/src/MLproject index eeece5e05..8511af761 100644 --- a/examples/tensorflow-mnist-model-inversion/src/MLproject +++ 
b/examples/tensorflow-mnist-model-inversion/src/MLproject @@ -32,22 +32,23 @@ entry_points: learning_rate: {type: float, default: 0.1} seed: {type: float, default: -1} command: > - python mi.py - --adv-tar-name {adv_tar_name} - --adv-data-dir {adv_data_dir} - --model-name {model_name} - --model-version {model_version} - --batch-size {batch_size} - --max-iter {max_iter} - --window-length {window_length} - --threshold {threshold} - --learning-rate {learning_rate} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment mi.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment mi.yml + -P adv_tar_name={adv_tar_name} + -P adv_data_dir={adv_data_dir} + -P model_name={model_name} + -P model_version={model_version} + -P batch_size={batch_size} + -P max_iter={max_iter} + -P window_length={window_length} + -P threshold={threshold} + -P learning_rate={learning_rate} + -P seed={seed} train: parameters: - data_dir: { type: path, default: "/dioptra/data/Mnist" } - image_size: { type: string, default: "28,28,1" } + training_dir: { type: path, default: "/dioptra/data/Mnist/training" } + testing_dir: { type: path, default: "/dioptra/data/Mnist/testing" } + image_size: { type: string, default: "[28,28,1]" } model_architecture: { type: string, default: "le_net" } epochs: { type: float, default: 30 } batch_size: { type: float, default: 32 } @@ -57,14 +58,14 @@ entry_points: validation_split: { type: float, default: 0.2 } seed: { type: float, default: -1 } command: > - python train.py - --data-dir {data_dir} - --image-size {image_size} - --model-architecture {model_architecture} - --epochs {epochs} - --batch-size {batch_size} - --register-model-name {register_model_name} - --learning-rate {learning_rate} - --optimizer {optimizer} - --validation-split {validation_split} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment train.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment train.yml + -P training_dir={training_dir} + -P testing_dir={testing_dir} + -P 
image_size={image_size} + -P model_architecture={model_architecture} + -P epochs={epochs} + -P batch_size={batch_size} + -P register_model_name={register_model_name} + -P learning_rate={learning_rate} + -P validation_split={validation_split} + -P seed={seed} diff --git a/examples/tensorflow-mnist-model-inversion/src/mi.yml b/examples/tensorflow-mnist-model-inversion/src/mi.yml new file mode 100644 index 000000000..072728764 --- /dev/null +++ b/examples/tensorflow-mnist-model-inversion/src/mi.yml @@ -0,0 +1,225 @@ +types: + rng: + path: + path_string: + union: [string, path] + path_string_null: + union: [path_string, "null"] + dirs: + list: path_string + parameters: + mapping: [string, number] + kwargs: + mapping: [string, any] + kwargs_null: + union: [kwargs, "null"] + keras_classifier: + distance_metric_request: + mapping: [string, string] + distance_metrics_requests: + list: distance_metric_request + distance_metric: + tuple: [string, any] + distance_metrics: + list: distance_metric + distance_metrics_null: + union: [distance_metrics, "null"] + dataframe: + image_size: + tuple: [integer, integer, integer] + norm: + union: [integer, number, string] + +parameters: + adv_tar_name: testing_adversarial_mi.tar.gz + adv_data_dir: adv_testing + model_name: mnist_le_net + model_version: 1 + batch_size: 32 + classes: 10 + max_iter: 1000 + window_length: 100 + threshold: .99 + learning_rate: 0.1 + seed: -1 + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + make_directories: + plugin: 
dioptra_builtins.artifacts.utils.make_directories + inputs: + - dirs: dirs + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + load_classifier: + plugin: dioptra_builtins.registry.art.load_wrapped_tensorflow_keras_classifier + inputs: + - name: name + type: string + required: true + - version: integer + - name: classifier_kwargs + type: kwargs + required: false + outputs: + classifier: keras_classifier + + infer_model_inversion: + plugin: dioptra_custom.model_inversion.modelinversion.infer_model_inversion + inputs: + - keras_classifier: keras_classifier + - adv_data_dir: path_string + - name: batch_size + type: integer + required: false + - name: classes + type: integer + required: false + - name: max_iter + type: integer + required: false + - name: window_length + type: integer + required: false + - name: threshold + type: number + required: false + - name: learning_rate + type: number + required: false + + create_dataset: + plugin: dioptra_builtins.attacks.fgm.create_adversarial_fgm_dataset + inputs: + - data_dir: string + - adv_data_dir: path_string + - keras_classifier: keras_classifier + - image_size: image_size + - name: distance_metrics_list + type: distance_metrics_null + required: false + - name: rescale + type: number + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + - name: eps + type: number + required: false + - name: eps_step + type: number + required: false + - name: minimal + type: boolean + required: false + - name: norm + type: norm + required: false + outputs: + dataset: dataframe + + upload_artifact_directory: + plugin: dioptra_builtins.artifacts.mlflow.upload_directory_as_tarball_artifact + inputs: + - source_dir: path_string + - tarball_filename: string + - name: tarball_write_mode + type: string + required: false + - name: working_dir + type: path_string + required: false + + upload_artifact_dataframe: 
+ plugin: dioptra_builtins.artifacts.mlflow.upload_data_frame_artifact + inputs: + - data_frame: dataframe + - file_name: string + - file_format: string + - name: file_format_kwargs + type: kwargs_null + required: false + - name: working_dir + type: path_string_null + required: false + +graph: + init_rng: + init_rng: $seed + + tensorflow_global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow_results: + init_tensorflow: $tensorflow_global_seed + + make_directories_results: + make_directories: [[$adv_data_dir]] + + log_mlflow_params_result: + log_params: + - entry_point_seed: $seed + tensorflow_global_seed: $tensorflow_global_seed + dataset_seed: $dataset_seed + + keras_classifier: + load_classifier: + name: $model_name + version: $model_version + dependencies: init_tensorflow_results + + inferred: + infer_model_inversion: + keras_classifier: $keras_classifier + adv_data_dir: $adv_data_dir + batch_size: $batch_size + classes: $classes + max_iter: $max_iter + window_length: $window_length + threshold: $threshold + learning_rate: $learning_rate + dependencies: make_directories_results + + upload_directory: + upload_artifact_directory: + - $adv_data_dir + - $adv_tar_name + dependencies: inferred diff --git a/examples/tensorflow-mnist-model-inversion/src/train.yml b/examples/tensorflow-mnist-model-inversion/src/train.yml new file mode 100644 index 000000000..3325d8bbb --- /dev/null +++ b/examples/tensorflow-mnist-model-inversion/src/train.yml @@ -0,0 +1,308 @@ +types: + rng: + optimizer: + name_parameters: + mapping: + name: string + parameters: + mapping: [string, any] + metrics_list: + list: name_parameters + performance_metrics: + metrics: + callbacks_in: + list: name_parameters + callbacks_out: + directory_iterator: + parameters: + mapping: [string, number] + image_size: + tuple: [integer, integer, integer] + sequential: + fit_kwargs: + mapping: [string, any] + fit_kwargs_null: + union: [fit_kwargs, "null"] + 
str_null: + union: [string, "null"] + num_null: + union: [number, "null"] + +parameters: + seed: -1 + optimizer_name: Adam + learning_rate: 0.001 + training_dir: /nfs/data/training + testing_dir: /nfs/data/testing + image_size: [28, 28, 1] + validation_split: 0.2 + batch_size: 32 + model_architecture: le_net + epochs: 30 + register_model_name: "" + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + get_optimizer: + plugin: dioptra_custom.evaluation.tensorflow.get_optimizer + inputs: + - name: name + type: string + - learning_rate: number + outputs: + optimizer: optimizer + + get_perf_metrics: + plugin: dioptra_custom.evaluation.tensorflow.get_performance_metrics + inputs: + - metrics_list: metrics_list + outputs: + performance_metrics: performance_metrics + + get_callbacks: + plugin: dioptra_custom.evaluation.tensorflow.get_model_callbacks + inputs: + - callbacks_list: callbacks_in + outputs: + callbacks: callbacks_out + + create_dataset: + plugin: dioptra_builtins.data.tensorflow.create_image_dataset + inputs: + - data_dir: string + - subset: str_null + - image_size: image_size + - seed: integer + - name: rescale + type: number + required: false + - name: validation_split + type: num_null + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + outputs: + dataset: directory_iterator + + get_num_classes: + plugin: 
dioptra_builtins.data.tensorflow.get_n_classes_from_directory_iterator + inputs: + - ds: directory_iterator + outputs: + num_classes: integer + + init_classifier: + plugin: dioptra_builtins.estimators.keras_classifiers.init_classifier + inputs: + - model_architecture: string + - optimizer: optimizer + - metrics: performance_metrics + - input_shape: image_size + - n_classes: integer + - name: loss + type: string + required: false + outputs: + classifier: sequential + + fit_model: + plugin: dioptra_builtins.estimators.methods.fit + inputs: + - estimator: any + - x: any + - name: y + type: any + required: false + - name: fit_kwargs + type: fit_kwargs_null + required: false + + eval_metrics_tensorflow: + plugin: dioptra_custom.evaluation.tensorflow.evaluate_metrics_tensorflow + inputs: + - classifier: sequential + - dataset: directory_iterator + outputs: + metrics: metrics + + log_metrics: + plugin: dioptra_builtins.tracking.mlflow.log_metrics + inputs: + - metrics: metrics + + log_keras_estimator: + plugin: dioptra_builtins.tracking.mlflow.log_tensorflow_keras_estimator + inputs: + - estimator: sequential + - model_dir: string + + add_model_to_registry: + plugin: dioptra_custom.evaluation.mlflow.add_model_to_registry + inputs: + - name: name + type: string + - model_dir: string + +graph: + init_rng: + init_rng: $seed + + global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow: + init_tensorflow: $global_seed + + log_params: + log_params: + - entry_point_seed: $init_rng.seed + tensorflow_global_seed: $global_seed + dataset_seed: $dataset_seed + + optimizer: + get_optimizer: [$optimizer_name, $learning_rate] + dependencies: + - init_tensorflow + + perf_metrics: + get_perf_metrics: + - - name: CategoricalAccuracy + parameters: { name: accuracy } + - name: Precision + parameters: { name: precision } + - name: Recall + parameters: { name: recall } + - name: AUC + parameters: { name: auc } + dependencies: + - 
init_tensorflow + + callbacks: + get_callbacks: + - - name: EarlyStopping + parameters: + monitor: val_loss + min_delta: .01 + patience: 5 + restore_best_weights: true + dependencies: + - init_tensorflow + + training_dataset: + create_dataset: + data_dir: $training_dir + subset: training + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: + - init_tensorflow + + validation_dataset: + create_dataset: + data_dir: $training_dir + subset: validation + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: + - init_tensorflow + + testing_dataset: + create_dataset: + data_dir: $testing_dir + subset: null + image_size: $image_size + seed: $dataset_seed + validation_split: null + batch_size: $batch_size + dependencies: + - init_tensorflow + + num_classes: + get_num_classes: $training_dataset + + classifier: + init_classifier: + - $model_architecture + - $optimizer + - $perf_metrics + - $image_size + - $num_classes + dependencies: + - init_tensorflow + + model: + fit_model: + estimator: $classifier + x: $training_dataset + fit_kwargs: + nb_epochs: $epochs + validation_data: $validation_dataset + callbacks: $callbacks + verbose: 2 + + eval_metrics_tensorflow: + eval_metrics_tensorflow: + - $classifier + - $testing_dataset + dependencies: + - model + + log_metrics: + log_metrics: $eval_metrics_tensorflow + + log_keras_estimator: + log_keras_estimator: + - $classifier + - model + dependencies: + - model + + add_model_to_registry: + add_model_to_registry: + - $register_model_name + - model + dependencies: + - log_keras_estimator diff --git a/examples/tensorflow-mnist-pixel-threshold-legacy/README.md b/examples/tensorflow-mnist-pixel-threshold-legacy/README.md new file mode 100644 index 000000000..f6d75e356 --- /dev/null +++ b/examples/tensorflow-mnist-pixel-threshold-legacy/README.md @@ -0,0 +1,21 @@ +# Tensorflow MNIST Pixel 
Threshold Demo (legacy) + +The demo provided in the Jupyter notebook file [demo.ipynb](demo.ipynb) uses Dioptra to run experiments that investigate the effects of the pixel threshold attack when launched on a neural network model trained on the MNIST dataset. + +## Running the example + +To prepare your environment for running this example, follow the linked instructions below: + +1. [Create and activate a Python virtual environment and install the necessary dependencies](../README.md#creating-a-virtual-environment) +2. [Download the MNIST dataset using the `download_data.py` script](../README.md#downloading-datasets). +3. [Follow the links in these User Setup instructions](../../README.md#user-setup) to do the following: + - Build the containers + - Use the cookiecutter template to generate the scripts, configuration files, and Docker Compose files you will need to run Dioptra +4. [Edit the `docker-compose.yml` file to mount the data folder in the worker containers](../README.md#mounting-the-data-folder-in-the-worker-containers) +5. [Initialize and start Dioptra](https://pages.nist.gov/dioptra/getting-started/running-dioptra.html#initializing-the-deployment) +6. [Register the custom task plugins for Dioptra's examples and demos](../README.md#registering-custom-task-plugins) +7. [Register the queues for Dioptra's examples and demos](../README.md#registering-queues) +8. [Start JupyterLab and open `demo.ipynb`](../README.md#starting-jupyter-lab) + +Steps 1–4 and 6–7 only need to be run once. +**Returning users only need to repeat Steps 5 (if you stopped Dioptra using `docker compose down`) and 8 (if you stopped the `jupyter lab` process)**. 
diff --git a/examples/tensorflow-mnist-pixel-threshold-legacy/demo.ipynb b/examples/tensorflow-mnist-pixel-threshold-legacy/demo.ipynb new file mode 100644 index 000000000..cf6fe416b --- /dev/null +++ b/examples/tensorflow-mnist-pixel-threshold-legacy/demo.ipynb @@ -0,0 +1,336 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tensorflow MNIST Pixel Threshold demo (legacy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook contains a demonstration of how to use Dioptra to run experiments that investigate the effects of the pixel threshold attack when launched on a neural network model trained on the MNIST dataset." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below we import the necessary Python modules and ensure the proper environment variables are set so that all the code blocks will work as expected," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Import packages from the Python standard library\n", + "import importlib.util\n", + "import os\n", + "import pprint\n", + "import sys\n", + "import warnings\n", + "from pathlib import Path\n", + "\n", + "\n", + "def register_python_source_file(module_name: str, filepath: Path) -> None:\n", + " \"\"\"Import a source file directly.\n", + "\n", + " Args:\n", + " module_name: The module name to associate with the imported source file.\n", + " filepath: The path to the source file.\n", + "\n", + " Notes:\n", + " Adapted from the following implementation in the Python documentation:\n", + " https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n", + " \"\"\"\n", + " spec = importlib.util.spec_from_file_location(module_name, str(filepath))\n", + " module = importlib.util.module_from_spec(spec)\n", + " sys.modules[module_name] 
= module\n", + " spec.loader.exec_module(module)\n", + "\n", + "\n", + "# Filter out warning messages\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", + "# Experiment name\n", + "EXPERIMENT_NAME = \"mnist_pixel_threshold\"\n", + "\n", + "# Default address for accessing the RESTful API service\n", + "RESTAPI_ADDRESS = \"http://localhost:80\"\n", + "\n", + "# Set DIOPTRA_RESTAPI_URI variable if not defined, used to connect to RESTful API service\n", + "if os.getenv(\"DIOPTRA_RESTAPI_URI\") is None:\n", + " os.environ[\"DIOPTRA_RESTAPI_URI\"] = RESTAPI_ADDRESS\n", + "\n", + "# Default address for accessing the RESTful API service\n", + "MLFLOW_TRACKING_URI = \"http://localhost:35000\"\n", + "\n", + "# Set MLFLOW_TRACKING_URI variable, used to connect to MLFlow Tracking service\n", + "if os.getenv(\"MLFLOW_TRACKING_URI\") is None:\n", + " os.environ[\"MLFLOW_TRACKING_URI\"] = MLFLOW_TRACKING_URI\n", + "\n", + "# Path to workflows archive\n", + "WORKFLOWS_TAR_GZ = Path(\"workflows.tar.gz\")\n", + "\n", + "# Register the examples/scripts directory as a Python module\n", + "register_python_source_file(\"scripts\", Path(\"..\", \"scripts\", \"__init__.py\"))\n", + "\n", + "from scripts.client import DioptraClient\n", + "from scripts.utils import make_tar\n", + "\n", + "# Import third-party Python packages\n", + "import numpy as np\n", + "from mlflow.tracking import MlflowClient\n", + "\n", + "# Create random number generator\n", + "rng = np.random.default_rng(54399264723942495723666216079516778448)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dataset" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We obtained a copy of the MNIST dataset when we ran `download_data.py` script. 
If you have not done so already, see [How to Obtain Common Datasets](https://pages.nist.gov/dioptra/getting-started/acquiring-datasets.html).\n", + "The training and testing images for the MNIST dataset are stored within the `/dioptra/data/Mnist` directory as PNG files that are organized into the following folder structure," + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Mnist\n", + " ├── testing\n", + " │ ├── 0\n", + " │ ├── 1\n", + " │ ├── 2\n", + " │ ├── 3\n", + " │ ├── 4\n", + " │ ├── 5\n", + " │ ├── 6\n", + " │ ├── 7\n", + " │ ├── 8\n", + " │ └── 9\n", + " └── training\n", + " ├── 0\n", + " ├── 1\n", + " ├── 2\n", + " ├── 3\n", + " ├── 4\n", + " ├── 5\n", + " ├── 6\n", + " ├── 7\n", + " ├── 8\n", + " └── 9" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The subfolders under `training/` and `testing/` are the classification labels for the images in the dataset.\n", + "This folder structure is a standardized way to encode the label information and many libraries can make use of it, including the Tensorflow library that we are using for this particular demo." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Submit and run jobs" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The entrypoints that we will be running in this example are implemented in the Python source files under `src/` and the `src/MLproject` file.\n", + "To run these entrypoints within Dioptra's architecture, we need to package those files up into an archive and submit it to the Dioptra RESTful API to create a new job.\n", + "For convenience, we provide the `make_tar` helper function defined in `examples/scripts/utils.py`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "make_tar([\"src\"], WORKFLOWS_TAR_GZ)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To connect with the endpoint, we will use a client class defined in the `examples/scripts/client.py` file that is able to connect with the Dioptra RESTful API using the HTTP protocol.\n", + "We connect using the client below.\n", + "The client uses the environment variable `DIOPTRA_RESTAPI_URI`, which we configured at the top of the notebook, to figure out how to connect to the Dioptra RESTful API." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "restapi_client = DioptraClient()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We need to register an experiment under which to collect our job runs.\n", + "The code below checks if the relevant experiment exists.\n", + "If it does, then it just returns info about the experiment, if it doesn't, it then registers the new experiment." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_experiment = restapi_client.get_experiment_by_name(name=EXPERIMENT_NAME)\n", + "\n", + "if response_experiment is None or \"Not Found\" in response_experiment.get(\"message\", []):\n", + " response_experiment = restapi_client.register_experiment(name=EXPERIMENT_NAME)\n", + "\n", + "response_experiment" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to create our model.\n", + "In this example, we will be creating an MNIST classifier. So, we use the ```train``` entry point to simply train a new MNIST model." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! 
python ../scripts/register_task_plugins.py --force --plugins-dir ../task-plugins" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_train = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"train\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " \"-P model_architecture=le_net\",\n", + " \"-P epochs=30\",\n", + " \"-P register_model_name=mnist_le_net\",\n", + " ]\n", + " ),\n", + " queue=\"tensorflow_cpu\",\n", + " timeout=\"1h\",\n", + ")\n", + "\n", + "pprint.pprint(response_train)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have an MNIST trained model, we can run the Pixel Threshold attack on it.\n", + "The Pixel Threshold attack attempts to change a limited number of pixels in a test image in an attempt to get it misclassified.\n", + "It has two main arguments:\n", + "\n", + "| parameter | data type | description |\n", + "| --- | --- | --- |\n", + "| th | int | The maximum number of pixels it is allowed to change |\n", + "| `es` | int | If 0, then use the CMA-ES strategy, or if 1, use the DE strategy for evolution |" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "response_pt = restapi_client.submit_job(\n", + " workflows_file=WORKFLOWS_TAR_GZ,\n", + " experiment_name=EXPERIMENT_NAME,\n", + " entry_point=\"pt\",\n", + " entry_point_kwargs=\" \".join(\n", + " [\n", + " \"-P model_name=mnist_le_net\",\n", + " \"-P model_version=1\",\n", + " \"-P batch_size=32\",\n", + " \"-P th=1\",\n", + " \"-P es=0\",\n", + " ]\n", + " ),\n", + " queue=\"tensorflow_gpu\",\n", + " timeout=\"1h\",\n", + ")\n", + "\n", + "pprint.pprint(response_pt)" + ] + } + ], + "metadata": { + "interpreter": { + "hash": "edee40310913f16e2ca02c1d37887bcb7f07f00399ca119bb7e27de7d632ea99" + }, + 
"kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/tensorflow-mnist-pixel-threshold-legacy/src/MLproject b/examples/tensorflow-mnist-pixel-threshold-legacy/src/MLproject new file mode 100644 index 000000000..8ab56a1b5 --- /dev/null +++ b/examples/tensorflow-mnist-pixel-threshold-legacy/src/MLproject @@ -0,0 +1,69 @@ +# This Software (Dioptra) is being made available as a public service by the +# National Institute of Standards and Technology (NIST), an Agency of the United +# States Department of Commerce. This software was developed in part by employees of +# NIST and in part by NIST contractors. Copyright in portions of this software that +# were developed by NIST contractors has been licensed or assigned to NIST. Pursuant +# to Title 17 United States Code Section 105, works of NIST employees are not +# subject to copyright protection in the United States. However, NIST may hold +# international copyright in software created by its employees and domestic +# copyright (or licensing rights) in portions of software that were assigned or +# licensed to NIST. To the extent that NIST holds copyright in this software, it is +# being made available under the Creative Commons Attribution 4.0 International +# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts +# of the software developed or licensed by NIST. 
+# +# ACCESS THE FULL CC BY 4.0 LICENSE HERE: +# https://creativecommons.org/licenses/by/4.0/legalcode +name: mnist-pixel-threshold + +entry_points: + pt: + parameters: + data_dir: { type: path, default: "/dioptra/data/Mnist" } + image_size: { type: string, default: "28,28,1" } + adv_tar_name: { type: string, default: "testing_adversarial_pt.tar.gz" } + adv_data_dir: { type: string, default: "adv_testing" } + model_name: { type: string, default: "mnist_le_net" } + model_version: { type: string, default: "1" } + batch_size: { type: float, default: 32 } + th: { type: int, default: 1 } + es: { type: int, default: 0 } + seed: { type: float, default: -1 } + clip_values: { type: string, default: "0,1" } + command: > + python pt.py + --data-dir {data_dir} + --image-size {image_size} + --adv-tar-name {adv_tar_name} + --adv-data-dir {adv_data_dir} + --model-name {model_name} + --model-version {model_version} + --batch-size {batch_size} + --th {th} + --es {es} + --seed {seed} + --clip-values {clip_values} + train: + parameters: + data_dir: { type: path, default: "/dioptra/data/Mnist" } + image_size: { type: string, default: "28,28,1" } + model_architecture: { type: string, default: "le_net" } + epochs: { type: float, default: 30 } + batch_size: { type: float, default: 32 } + register_model_name: { type: string, default: "" } + learning_rate: { type: float, default: 0.001 } + optimizer: { type: string, default: "Adam" } + validation_split: { type: float, default: 0.2 } + seed: { type: float, default: -1 } + command: > + python train.py + --data-dir {data_dir} + --image-size {image_size} + --model-architecture {model_architecture} + --epochs {epochs} + --batch-size {batch_size} + --register-model-name {register_model_name} + --learning-rate {learning_rate} + --optimizer {optimizer} + --validation-split {validation_split} + --seed {seed} diff --git a/examples/tensorflow-mnist-pixel-threshold/src/pt.py b/examples/tensorflow-mnist-pixel-threshold-legacy/src/pt.py similarity index 
99% rename from examples/tensorflow-mnist-pixel-threshold/src/pt.py rename to examples/tensorflow-mnist-pixel-threshold-legacy/src/pt.py index 761e9e5e4..2fa0521be 100644 --- a/examples/tensorflow-mnist-pixel-threshold/src/pt.py +++ b/examples/tensorflow-mnist-pixel-threshold-legacy/src/pt.py @@ -22,6 +22,7 @@ import click import mlflow import numpy as np +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-pixel-threshold/src/train.py b/examples/tensorflow-mnist-pixel-threshold-legacy/src/train.py similarity index 99% rename from examples/tensorflow-mnist-pixel-threshold/src/train.py rename to examples/tensorflow-mnist-pixel-threshold-legacy/src/train.py index eb58d38fc..29d2ac10b 100644 --- a/examples/tensorflow-mnist-pixel-threshold/src/train.py +++ b/examples/tensorflow-mnist-pixel-threshold-legacy/src/train.py @@ -21,6 +21,7 @@ import click import mlflow +import sklearn # noqa: F401 import structlog from prefect import Flow, Parameter from prefect.utilities.logging import get_logger as get_prefect_logger diff --git a/examples/tensorflow-mnist-pixel-threshold/src/MLproject b/examples/tensorflow-mnist-pixel-threshold/src/MLproject index c43e80468..0d9dfc8eb 100644 --- a/examples/tensorflow-mnist-pixel-threshold/src/MLproject +++ b/examples/tensorflow-mnist-pixel-threshold/src/MLproject @@ -19,8 +19,8 @@ name: mnist-pixel-threshold entry_points: pt: parameters: - data_dir: { type: path, default: "/dioptra/data/Mnist" } - image_size: { type: string, default: "28,28,1" } + data_dir: { type: path, default: "/dioptra/data/Mnist/testing" } + image_size: { type: string, default: "[28,28,1]" } adv_tar_name: { type: string, default: "testing_adversarial_pt.tar.gz" } adv_data_dir: { type: string, default: "adv_testing" } model_name: { type: string, default: "mnist_le_net" } @@ -29,61 +29,42 @@ entry_points: th: { type: int, default 1 } 
es: { type: int, default 0 } seed: { type: float, default: -1 } - clip_values: { type: string, default: "0,1" } + clip_values: { type: string, default: "[0,1]" } command: > - python pt.py - --data-dir {data_dir} - --image-size {image_size} - --adv-tar-name {adv_tar_name} - --adv-data-dir {adv_data_dir} - --model-name {model_name} - --model-version {model_version} - --batch-size {batch_size} - --th {th} - --es {es} - --seed {seed} - --clip-values {clip_values} - - infer: - parameters: - run_id: { type: string } - image_size: { type: string, default: "28,28,1" } - model_name: { type: string, default: "mnist_le_net" } - model_version: { type: string, default: "1" } - adv_tar_name: { type: string, default: "testing_adversarial_pt.tar.gz" } - adv_data_dir: { type: string, default: "adv_testing" } - seed: { type: float, default: -1 } - command: > - python infer.py - --run-id {run_id} - --image-size {image_size} - --model-name {model_name} - --model-version {model_version} - --adv-tar-name {adv_tar_name} - --adv-data-dir {adv_data_dir} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment pt.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment pt.yml + -P data_dir={data_dir} + -P image_size={image_size} + -P adv_tar_name={adv_tar_name} + -P adv_data_dir={adv_data_dir} + -P model_name={model_name} + -P model_version={model_version} + -P batch_size={batch_size} + -P th={th} + -P es={es} + -P seed={seed} + -P clip_values={clip_values} train: parameters: - data_dir: { type: path, default: "/dioptra/data/Mnist" } - image_size: { type: string, default: "28,28,1" } + training_dir: { type: path, default: "/dioptra/data/Mnist/training" } + testing_dir: { type: path, default: "/dioptra/data/Mnist/testing" } + image_size: { type: string, default: "[28,28,1]" } model_architecture: { type: string, default: "le_net" } epochs: { type: float, default: 30 } batch_size: { type: float, default: 32 } register_model_name: { type: string, default: "" } learning_rate: { type: 
float, default: 0.001 } - optimizer: { type: string, default: "Adam" } validation_split: { type: float, default: 0.2 } seed: { type: float, default: -1 } command: > - python train.py - --data-dir {data_dir} - --image-size {image_size} - --model-architecture {model_architecture} - --epochs {epochs} - --batch-size {batch_size} - --register-model-name {register_model_name} - --learning-rate {learning_rate} - --optimizer {optimizer} - --validation-split {validation_split} - --seed {seed} + PYTHONPATH=$DIOPTRA_PLUGIN_DIR validate-experiment train.yml && PYTHONPATH=$DIOPTRA_PLUGIN_DIR run-experiment train.yml + -P training_dir={training_dir} + -P testing_dir={testing_dir} + -P image_size={image_size} + -P model_architecture={model_architecture} + -P epochs={epochs} + -P batch_size={batch_size} + -P register_model_name={register_model_name} + -P learning_rate={learning_rate} + -P validation_split={validation_split} + -P seed={seed} diff --git a/examples/tensorflow-mnist-pixel-threshold/src/infer.py b/examples/tensorflow-mnist-pixel-threshold/src/infer.py deleted file mode 100644 index d80244feb..000000000 --- a/examples/tensorflow-mnist-pixel-threshold/src/infer.py +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/env python -# This Software (Dioptra) is being made available as a public service by the -# National Institute of Standards and Technology (NIST), an Agency of the United -# States Department of Commerce. This software was developed in part by employees of -# NIST and in part by NIST contractors. Copyright in portions of this software that -# were developed by NIST contractors has been licensed or assigned to NIST. Pursuant -# to Title 17 United States Code Section 105, works of NIST employees are not -# subject to copyright protection in the United States. However, NIST may hold -# international copyright in software created by its employees and domestic -# copyright (or licensing rights) in portions of software that were assigned or -# licensed to NIST. 
To the extent that NIST holds copyright in this software, it is -# being made available under the Creative Commons Attribution 4.0 International -# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts -# of the software developed or licensed by NIST. -# -# ACCESS THE FULL CC BY 4.0 LICENSE HERE: -# https://creativecommons.org/licenses/by/4.0/legalcode -import os -from pathlib import Path - -import click -import mlflow -import mlflow.tensorflow -import structlog -from prefect import Flow, Parameter -from prefect.utilities.logging import get_logger as get_prefect_logger -from structlog.stdlib import BoundLogger -from tasks import evaluate_metrics_tensorflow - -from dioptra import pyplugs -from dioptra.sdk.utilities.contexts import plugin_dirs -from dioptra.sdk.utilities.logging import ( - StderrLogStream, - StdoutLogStream, - attach_stdout_stream_handler, - clear_logger_handlers, - configure_structlog, - set_logging_level, -) - -_PLUGINS_IMPORT_PATH: str = "dioptra_builtins" -LOGGER: BoundLogger = structlog.stdlib.get_logger() - - -def _coerce_comma_separated_ints(ctx, param, value): - return tuple(int(x.strip()) for x in value.split(",")) - - -@click.command() -@click.option( - "--run-id", - type=click.STRING, - help="MLFlow Run ID of a successful fgm attack", -) -@click.option( - "--image-size", - type=click.STRING, - callback=_coerce_comma_separated_ints, - help="Dimensions for the input images", -) -@click.option( - "--model-name", - type=click.STRING, - help="Name of model to load from registry", -) -@click.option( - "--model-version", - type=click.STRING, - help="Version of model to load from registry", -) -@click.option( - "--adv-tar-name", - type=click.STRING, - default="testing_adversarial_fgm.tar.gz", - help="Name of tarfile artifact containing fgm images", -) -@click.option( - "--adv-data-dir", - type=click.STRING, - default="adv_testing", - help="Directory in tarfile containing fgm images", -) -@click.option( - "--seed", - 
type=click.INT, - help="Set the entry point rng seed", - default=-1, -) -def infer_adversarial( - run_id, image_size, model_name, model_version, adv_tar_name, adv_data_dir, seed -): - LOGGER.info( - "Execute MLFlow entry point", - entry_point="infer", - image_size=image_size, - model_name=model_name, - model_version=model_version, - adv_tar_name=adv_tar_name, - adv_data_dir=adv_data_dir, - seed=seed, - ) - - with mlflow.start_run() as active_run: # noqa: F841 - flow: Flow = init_infer_flow() - state = flow.run( - parameters=dict( - image_size=image_size, - model_name=model_name, - model_version=model_version, - run_id=run_id, - adv_tar_name=adv_tar_name, - adv_data_dir=(Path.cwd() / adv_data_dir).resolve(), - seed=seed, - ) - ) - - return state - - -def init_infer_flow() -> Flow: - with Flow("Inference") as flow: - ( - image_size, - model_name, - model_version, - run_id, - adv_tar_name, - adv_data_dir, - seed, - ) = ( - Parameter("image_size"), - Parameter("model_name"), - Parameter("model_version"), - Parameter("run_id"), - Parameter("adv_tar_name"), - Parameter("adv_data_dir"), - Parameter("seed"), - ) - seed, rng = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.random", "rng", "init_rng", seed=seed - ) - tensorflow_global_seed = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.random", "sample", "draw_random_integer", rng=rng - ) - dataset_seed = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.random", "sample", "draw_random_integer", rng=rng - ) - init_tensorflow_results = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.backend_configs", - "tensorflow", - "init_tensorflow", - seed=tensorflow_global_seed, - ) - - log_mlflow_params_result = pyplugs.call_task( # noqa: F841 - f"{_PLUGINS_IMPORT_PATH}.tracking", - "mlflow", - "log_parameters", - parameters=dict( - entry_point_seed=seed, - tensorflow_global_seed=tensorflow_global_seed, - dataset_seed=dataset_seed, - ), - ) - adv_tar_path = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.artifacts", - "mlflow", - 
"download_all_artifacts_in_run", - run_id=run_id, - artifact_path=adv_tar_name, - ) - extract_tarfile_results = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.artifacts", - "utils", - "extract_tarfile", - filepath=adv_tar_path, - ) - adv_ds = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.data", - "tensorflow", - "create_image_dataset", - data_dir=adv_data_dir, - subset=None, - validation_split=None, - image_size=image_size, - seed=dataset_seed, - upstream_tasks=[init_tensorflow_results, extract_tarfile_results], - ) - classifier = pyplugs.call_task( - f"{_PLUGINS_IMPORT_PATH}.registry", - "mlflow", - "load_tensorflow_keras_classifier", - name=model_name, - version=model_version, - upstream_tasks=[init_tensorflow_results], - ) - classifier_performance_metrics = evaluate_metrics_tensorflow( - classifier=classifier, dataset=adv_ds - ) - log_classifier_performance_metrics_result = pyplugs.call_task( # noqa: F841 - f"{_PLUGINS_IMPORT_PATH}.tracking", - "mlflow", - "log_metrics", - metrics=classifier_performance_metrics, - ) - - return flow - - -if __name__ == "__main__": - log_level: str = os.getenv("DIOPTRA_JOB_LOG_LEVEL", default="INFO") - as_json: bool = True if os.getenv("DIOPTRA_JOB_LOG_AS_JSON") else False - - clear_logger_handlers(get_prefect_logger()) - attach_stdout_stream_handler(as_json) - set_logging_level(log_level) - configure_structlog() - - with plugin_dirs(), StdoutLogStream(as_json), StderrLogStream(as_json): - _ = infer_adversarial() diff --git a/examples/tensorflow-mnist-pixel-threshold/src/pt.yml b/examples/tensorflow-mnist-pixel-threshold/src/pt.yml new file mode 100644 index 000000000..c318f6665 --- /dev/null +++ b/examples/tensorflow-mnist-pixel-threshold/src/pt.yml @@ -0,0 +1,225 @@ +types: + rng: + path: + path_string: + union: [string, path] + path_string_null: + union: [path_string, "null"] + dirs: + list: path_string + parameters: + mapping: [string, number] + kwargs: + mapping: [string, any] + kwargs_null: + union: [kwargs, "null"] + 
keras_classifier: + distance_metric_request: + mapping: [string, string] + distance_metrics_requests: + list: distance_metric_request + distance_metric: + tuple: [string, any] + distance_metrics: + list: distance_metric + distance_metrics_null: + union: [distance_metrics, "null"] + dataframe: + image_size: + tuple: [integer, integer, integer] + +parameters: + data_dir: /dioptra/data/Mnist/testing + image_size: [28, 28, 1] + adv_tar_name: testing_adversarial_pt.tar.gz + adv_data_dir: adv_testing + distance_metrics_filename: distance_metrics.csv + model_name: mnist_le_net + model_version: 1 + clip_values: [0, 1] + batch_size: 32 + th: 1 + es: 0 + seed: -1 + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + make_directories: + plugin: dioptra_builtins.artifacts.utils.make_directories + inputs: + - dirs: dirs + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + load_classifier: + plugin: dioptra_builtins.registry.art.load_wrapped_tensorflow_keras_classifier + inputs: + - name: name + type: string + required: true + - version: integer + - classifier_kwargs: kwargs_null + outputs: + classifier: keras_classifier + + distance_metrics: + plugin: dioptra_builtins.metrics.distance.get_distance_metric_list + inputs: + - request: distance_metrics_requests + outputs: + distance_metrics_list: distance_metrics + + create_dataset: + plugin: dioptra_custom.pixel_threshold.pixelthreshold.create_pt_dataset + inputs: + - data_dir: string + - adv_data_dir: path_string 
+ - keras_classifier: keras_classifier + - image_size: image_size + - name: distance_metrics_list + type: distance_metrics_null + required: false + - name: rescale + type: number + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + - name: th + type: integer + required: false + - name: es + type: integer + required: false + outputs: + dataset: dataframe + + upload_artifact_directory: + plugin: dioptra_builtins.artifacts.mlflow.upload_directory_as_tarball_artifact + inputs: + - source_dir: path_string + - tarball_filename: string + - name: tarball_write_mode + type: string + required: false + - name: working_dir + type: path_string_null + required: false + + upload_artifact_dataframe: + plugin: dioptra_builtins.artifacts.mlflow.upload_data_frame_artifact + inputs: + - data_frame: dataframe + - file_name: string + - file_format: string + - name: file_format_kwargs + type: kwargs_null + required: false + - name: working_dir + type: path_string_null + required: false + +graph: + init_rng: + init_rng: $seed + + tensorflow_global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow_results: + init_tensorflow: $tensorflow_global_seed + + make_directories_results: + make_directories: [[$adv_data_dir]] + + log_mlflow_params_result: + log_params: + - entry_point_seed: $seed + tensorflow_global_seed: $tensorflow_global_seed + dataset_seed: $dataset_seed + + keras_classifier: + load_classifier: + name: $model_name + version: $model_version + classifier_kwargs: + clip_values: $clip_values + dependencies: init_tensorflow_results + + distance_metrics: + distance_metrics: + - - name: l_infinity_norm + func: l_inf_norm + - name: l_1_norm + func: l_1_norm + - name: l_2_norm + func: l_2_norm + - name: cosine_similarity + func: paired_cosine_similarities + - name: euclidean_distance + func: paired_euclidean_distances + - name: manhattan_distance + func: 
paired_manhattan_distances + - name: wasserstein_distance + func: paired_wasserstein_distances + + dataset: + create_dataset: + data_dir: $data_dir + keras_classifier: $keras_classifier + distance_metrics_list: $distance_metrics + adv_data_dir: $adv_data_dir + batch_size: $batch_size + image_size: $image_size + th: $th + es: $es + dependencies: make_directories_results + + upload_directory: + upload_artifact_directory: + - $adv_data_dir + - $adv_tar_name + dependencies: dataset + + upload_dataset: + upload_artifact_dataframe: + - $dataset + - $distance_metrics_filename + - csv.gz + - index: false diff --git a/examples/tensorflow-mnist-pixel-threshold/src/train.yml b/examples/tensorflow-mnist-pixel-threshold/src/train.yml new file mode 100644 index 000000000..b5b4c72c1 --- /dev/null +++ b/examples/tensorflow-mnist-pixel-threshold/src/train.yml @@ -0,0 +1,296 @@ +types: + rng: + optimizer: + name_parameters: + mapping: + name: string + parameters: + mapping: [string, any] + metrics_list: + list: name_parameters + performance_metrics: + metrics: + callbacks_in: + list: name_parameters + callbacks_out: + directory_iterator: + parameters: + mapping: [string, number] + image_size: + tuple: [integer, integer, integer] + sequential: + fit_kwargs: + mapping: [string, any] + fit_kwargs_null: + union: [fit_kwargs, "null"] + str_null: + union: [string, "null"] + num_null: + union: [number, "null"] + +parameters: + seed: -1 + optimizer_name: Adam + learning_rate: 0.001 + training_dir: /nfs/data/training + testing_dir: /nfs/data/testing + image_size: [28, 28, 1] + validation_split: 0.2 + batch_size: 32 + model_architecture: le_net + epochs: 30 + register_model_name: "" + +tasks: + init_rng: + plugin: dioptra_builtins.random.rng.init_rng + inputs: + - name: seed + type: integer + required: false + outputs: + - seed: integer + - rng: rng + + draw_random: + plugin: dioptra_builtins.random.sample.draw_random_integer + inputs: + - rng: rng + - name: low + type: integer + required: 
false + - name: high + type: integer + required: false + outputs: + value: integer + + init_tensorflow: + plugin: dioptra_builtins.backend_configs.tensorflow.init_tensorflow + inputs: + - seed: integer + + log_params: + plugin: dioptra_builtins.tracking.mlflow.log_parameters + inputs: + - parameters: parameters + + get_optimizer: + plugin: dioptra_custom.evaluation.tensorflow.get_optimizer + inputs: + - name: name + type: string + - learning_rate: number + outputs: + optimizer: optimizer + + get_perf_metrics: + plugin: dioptra_custom.evaluation.tensorflow.get_performance_metrics + inputs: + - metrics_list: metrics_list + outputs: + performance_metrics: performance_metrics + + get_callbacks: + plugin: dioptra_custom.evaluation.tensorflow.get_model_callbacks + inputs: + - callbacks_list: callbacks_in + outputs: + callbacks: callbacks_out + + create_dataset: + plugin: dioptra_builtins.data.tensorflow.create_image_dataset + inputs: + - data_dir: string + - subset: str_null + - image_size: image_size + - seed: integer + - name: rescale + type: number + required: false + - name: validation_split + type: num_null + required: false + - name: batch_size + type: integer + required: false + - name: label_mode + type: string + required: false + outputs: + dataset: directory_iterator + + get_num_classes: + plugin: dioptra_builtins.data.tensorflow.get_n_classes_from_directory_iterator + inputs: + - ds: directory_iterator + outputs: + num_classes: integer + + init_classifier: + plugin: dioptra_builtins.estimators.keras_classifiers.init_classifier + inputs: + - model_architecture: string + - optimizer: optimizer + - metrics: performance_metrics + - input_shape: image_size + - n_classes: integer + - name: loss + type: string + required: false + outputs: + classifier: sequential + + fit_model: + plugin: dioptra_builtins.estimators.methods.fit + inputs: + - estimator: any + - x: any + - name: y + type: any + required: false + - name: fit_kwargs + type: fit_kwargs_null + required: 
false + + eval_metrics_tensorflow: + plugin: dioptra_custom.evaluation.tensorflow.evaluate_metrics_tensorflow + inputs: + - classifier: sequential + - dataset: directory_iterator + outputs: + metrics: metrics + + log_metrics: + plugin: dioptra_builtins.tracking.mlflow.log_metrics + inputs: + - metrics: metrics + + log_keras_estimator: + plugin: dioptra_builtins.tracking.mlflow.log_tensorflow_keras_estimator + inputs: + - estimator: sequential + - model_dir: string + + add_model_to_registry: + plugin: dioptra_custom.evaluation.mlflow.add_model_to_registry + inputs: + - name: name + type: string + - model_dir: string + +graph: + init_rng: + init_rng: $seed + + global_seed: + draw_random: $init_rng.rng + + dataset_seed: + draw_random: $init_rng.rng + + init_tensorflow: + init_tensorflow: $global_seed + + log_params: + log_params: + - entry_point_seed: $init_rng.seed + tensorflow_global_seed: $global_seed + dataset_seed: $dataset_seed + + optimizer: + get_optimizer: [$optimizer_name, $learning_rate] + dependencies: init_tensorflow + + perf_metrics: + get_perf_metrics: + - - name: CategoricalAccuracy + parameters: { name: accuracy } + - name: Precision + parameters: { name: precision } + - name: Recall + parameters: { name: recall } + - name: AUC + parameters: { name: auc } + dependencies: init_tensorflow + + callbacks: + get_callbacks: + - - name: EarlyStopping + parameters: + monitor: val_loss + min_delta: .01 + patience: 5 + restore_best_weights: true + dependencies: init_tensorflow + + training_dataset: + create_dataset: + data_dir: $training_dir + subset: training + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: init_tensorflow + + validation_dataset: + create_dataset: + data_dir: $training_dir + subset: validation + image_size: $image_size + seed: $dataset_seed + validation_split: $validation_split + batch_size: $batch_size + dependencies: init_tensorflow + + testing_dataset: + 
create_dataset: + data_dir: $testing_dir + subset: null + image_size: $image_size + seed: $dataset_seed + validation_split: null + batch_size: $batch_size + dependencies: init_tensorflow + + num_classes: + get_num_classes: $training_dataset + + classifier: + init_classifier: + - $model_architecture + - $optimizer + - $perf_metrics + - $image_size + - $num_classes + + model: + fit_model: + estimator: $classifier + x: $training_dataset + fit_kwargs: + nb_epochs: $epochs + validation_data: $validation_dataset + callbacks: $callbacks + verbose: 2 + + eval_metrics_tensorflow: + eval_metrics_tensorflow: + - $classifier + - $testing_dataset + dependencies: model + + log_metrics: + log_metrics: $eval_metrics_tensorflow + + log_keras_estimator: + log_keras_estimator: + - $classifier + - model + + add_model_to_registry: + add_model_to_registry: + - $register_model_name + - model + dependencies: log_keras_estimator diff --git a/pyproject.toml b/pyproject.toml index 4612b14c4..33f19b1f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,11 +46,12 @@ dependencies = [ "boto3>=1.16.0", "Click>=8.0.0,<9", "entrypoints>=0.3", - "Flask>=2.0.0,<2.2.0", + "Flask>=2.0.0", "flask-accepts>=0.17.0", "Flask-Cors>=3.0.1", "Flask-Injector>=0.14.0", "Flask-Login>=0.6.0", + "Flask-Login>=0.6.0", "Flask-Migrate>=2.5.0", "flask-restx>=0.5.1", "Flask-SQLAlchemy>=2.4.0", diff --git a/requirements-dev-tensorflow.in b/requirements-dev-tensorflow.in index 98e0d9a45..e225b1326 100644 --- a/requirements-dev-tensorflow.in +++ b/requirements-dev-tensorflow.in @@ -1,3 +1,4 @@ tensorflow-cpu==2.12.1; (sys_platform == "linux" or sys_platform == "win32" or sys_platform == "darwin") and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") +tensorflow-cpu-aws==2.12.1; sys_platform == "linux" and (platform_machine == "aarch64" or platform_machine == "arm64") tensorflow-macos==2.12.0; (sys_platform == "darwin") and (platform_machine == "aarch64" or platform_machine == 
"arm64") tensorflow-metal==0.8.0; (sys_platform == "darwin") and (platform_machine == "aarch64" or platform_machine == "arm64") diff --git a/requirements/linux-aarch64-py3.9-requirements-dev-tensorflow.txt b/requirements/linux-aarch64-py3.9-requirements-dev-tensorflow.txt deleted file mode 100644 index bedf28bbe..000000000 --- a/requirements/linux-aarch64-py3.9-requirements-dev-tensorflow.txt +++ /dev/null @@ -1,924 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-aarch64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in -# --e file:.#egg=dioptra - # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 - # via dioptra (pyproject.toml) -aiohttp==3.8.5 - # via dioptra (pyproject.toml) -aiosignal==1.3.1 - # via aiohttp -alabaster==0.7.13 - # via sphinx -alembic==1.11.1 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-migrate - # mlflow -aniso8601==9.0.1 - # via flask-restx -anyio==3.7.1 - # via jupyter-server -appdirs==1.4.4 - # via esbonio -argon2-cffi==21.3.0 - # via jupyter-server -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -arrow==1.2.3 - # via - # isoduration - # jinja2-time -asttokens==2.2.1 - # via stack-data -async-lru==2.0.4 - # via jupyterlab -async-timeout==4.0.2 - # via - # aiohttp - # redis -attrs==23.1.0 - # via - # aiohttp - # cattrs - # flake8-bugbear - # jsonschema - # lsprotocol - # referencing -autopep8==2.0.2 - # via dioptra (pyproject.toml) -babel==2.12.1 - # via - # jupyterlab-server - # sphinx -backcall==0.2.0 - # via ipython -beautifulsoup4==4.12.2 - # via nbconvert -binaryornot==0.4.4 - # via cookiecutter -bleach==6.0.0 - # via - # kaggle - # nbconvert -boto3==1.28.16 - # via - # dioptra - # dioptra (pyproject.toml) -botocore==1.31.16 - # via - # boto3 - # s3transfer -build==0.10.0 - # 
via - # dioptra (pyproject.toml) - # pip-tools -cachetools==5.3.1 - # via tox -cattrs==23.1.2 - # via lsprotocol -certifi==2023.7.22 - # via - # kaggle - # requests -cffi==1.15.1 - # via - # argon2-cffi-bindings - # cryptography -chardet==5.1.0 - # via - # binaryornot - # tox -charset-normalizer==3.2.0 - # via - # aiohttp - # requests -click==8.1.6 - # via - # cookiecutter - # dask - # databricks-cli - # dioptra - # dioptra (pyproject.toml) - # distributed - # flask - # mlflow - # pip-tools - # prefect - # rq -cloudpickle==2.2.1 - # via - # dask - # distributed - # mlflow - # prefect -colorama==0.4.6 - # via tox -comm==0.1.3 - # via - # ipykernel - # ipywidgets -contourpy==1.1.0 - # via matplotlib -cookiecutter==2.1.1 - # via dioptra (pyproject.toml) -croniter==1.4.1 - # via prefect -cryptography==3.4.8 - # via dioptra (pyproject.toml) -cycler==0.11.0 - # via matplotlib -dask==2023.7.1 - # via - # distributed - # prefect -databricks-cli==0.17.7 - # via mlflow -debugpy==1.6.7 - # via ipykernel -decorator==5.1.1 - # via ipython -defusedxml==0.7.1 - # via nbconvert -distlib==0.3.7 - # via virtualenv -distributed==2023.7.1 - # via prefect -dnspython==2.4.1 - # via email-validator -docker==6.1.3 - # via - # mlflow - # prefect -docutils==0.17.1 - # via sphinx -email-validator==2.0.0.post2 - # via wtforms -entrypoints==0.4 - # via - # dioptra - # dioptra (pyproject.toml) - # mlflow -esbonio==0.16.1 - # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 - # via - # anyio - # cattrs -executing==1.2.0 - # via stack-data -fastjsonschema==2.18.0 - # via nbformat -filelock==3.12.2 - # via - # tox - # virtualenv -flake8==6.1.0 - # via - # dioptra (pyproject.toml) - # flake8-bugbear -flake8-bugbear==23.7.10 - # via dioptra (pyproject.toml) -flask==2.1.3 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-cors - # flask-injector - # flask-migrate - # flask-restx - # flask-sqlalchemy - # flask-wtf - # mlflow - # prometheus-flask-exporter -flask-accepts==0.18.4 - # via - # 
dioptra - # dioptra (pyproject.toml) -flask-cors==4.0.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 - # via - # dioptra - # dioptra (pyproject.toml) -flask-restx==1.1.0 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-accepts -flask-sqlalchemy==2.5.1 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-migrate -flask-wtf==1.1.1 - # via - # dioptra - # dioptra (pyproject.toml) -fonttools==4.41.1 - # via matplotlib -fqdn==1.5.1 - # via jsonschema -frozenlist==1.4.0 - # via - # aiohttp - # aiosignal -fsspec==2023.6.0 - # via - # dask - # universal-pathlib -gitdb==4.0.10 - # via gitpython -gitpython==3.1.32 - # via mlflow -greenlet==2.0.2 - # via sqlalchemy -gunicorn==21.2.0 - # via mlflow -idna==3.4 - # via - # anyio - # email-validator - # jsonschema - # requests - # yarl -imageio==2.31.1 - # via - # imgaug - # scikit-image -imagesize==1.4.1 - # via sphinx -imgaug==0.4.0 - # via dioptra (pyproject.toml) -importlib-metadata==6.8.0 - # via - # dask - # flask - # jupyter-client - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # mlflow - # nbconvert - # sphinx - # typeguard -importlib-resources==6.0.0 - # via - # matplotlib - # prefect -injector==0.21.0 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 - # via - # dioptra (pyproject.toml) - # jupyter - # jupyter-console - # jupyterlab - # qtconsole -ipython==8.14.0 - # via - # dioptra (pyproject.toml) - # ipykernel - # ipywidgets - # jupyter-console -ipython-genutils==0.2.0 - # via qtconsole -ipywidgets==8.1.0 - # via jupyter -isoduration==20.11.0 - # via jsonschema -itsdangerous==2.1.2 - # via - # flask - # flask-wtf -jedi==0.19.0 - # via ipython -jinja2==3.1.2 - # via - # cookiecutter - # distributed - # flask - # jinja2-time - # jupyter-server - # jupyterlab - # jupyterlab-server - # nbconvert - # sphinx -jinja2-time==0.2.0 - # via cookiecutter -jmespath==1.0.1 
- # via - # boto3 - # botocore -joblib==1.3.1 - # via scikit-learn -json5==0.9.14 - # via jupyterlab-server -jsonpointer==2.4 - # via jsonschema -jsonschema[format-nongpl]==4.18.4 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-restx - # jupyter-events - # jupyterlab-server - # nbformat -jsonschema-specifications==2023.7.1 - # via jsonschema -jupyter==1.0.0 - # via dioptra (pyproject.toml) -jupyter-client==8.3.0 - # via - # ipykernel - # jupyter-console - # jupyter-server - # nbclient - # qtconsole -jupyter-console==6.6.3 - # via jupyter -jupyter-core==5.3.1 - # via - # ipykernel - # jupyter-client - # jupyter-console - # jupyter-server - # jupyterlab - # nbclient - # nbconvert - # nbformat - # qtconsole -jupyter-events==0.7.0 - # via jupyter-server -jupyter-lsp==2.2.0 - # via jupyterlab -jupyter-server==2.7.0 - # via - # jupyter-lsp - # jupyterlab - # jupyterlab-server - # notebook - # notebook-shim -jupyter-server-terminals==0.4.4 - # via jupyter-server -jupyterlab==4.0.3 - # via - # dioptra (pyproject.toml) - # notebook -jupyterlab-pygments==0.2.2 - # via nbconvert -jupyterlab-server==2.24.0 - # via - # jupyterlab - # notebook -jupyterlab-widgets==3.0.8 - # via ipywidgets -kaggle==1.5.16 - # via dioptra (pyproject.toml) -kiwisolver==1.4.4 - # via matplotlib -lazy-loader==0.3 - # via scikit-image -locket==1.0.0 - # via - # distributed - # partd -lsprotocol==2023.0.0a2 - # via pygls -mako==1.2.4 - # via alembic -markdown-it-py==3.0.0 - # via rich -markupsafe==2.1.3 - # via - # jinja2 - # mako - # nbconvert - # wtforms -marshmallow==3.20.1 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-accepts - # marshmallow-oneofschema - # prefect -marshmallow-oneofschema==3.0.1 - # via prefect -matplotlib==3.7.2 - # via imgaug -matplotlib-inline==0.1.6 - # via - # ipykernel - # ipython -mccabe==0.7.0 - # via flake8 -mdurl==0.1.2 - # via markdown-it-py -mistune==3.0.1 - # via nbconvert -mlflow==1.27.0 - # via - # dioptra - # dioptra (pyproject.toml) 
-msgpack==1.0.5 - # via - # distributed - # prefect -multidict==6.0.4 - # via - # aiohttp - # yarl -multimethod==1.9.1 - # via - # dioptra - # dioptra (pyproject.toml) -mypy-extensions==1.0.0 - # via prefect -nbclient==0.8.0 - # via nbconvert -nbconvert==7.7.3 - # via - # dioptra (pyproject.toml) - # jupyter - # jupyter-server -nbformat==5.9.2 - # via - # jupyter-server - # nbclient - # nbconvert -nest-asyncio==1.5.7 - # via ipykernel -networkx==3.1 - # via scikit-image -notebook==7.0.1 - # via jupyter -notebook-shim==0.2.3 - # via - # jupyterlab - # notebook -numpy==1.25.2 - # via - # adversarial-robustness-toolbox - # contourpy - # dioptra - # dioptra (pyproject.toml) - # imageio - # imgaug - # matplotlib - # mlflow - # opencv-python - # pandas - # pyarrow - # pywavelets - # scikit-image - # scikit-learn - # scipy - # shapely - # tifffile -oauthlib==3.2.2 - # via databricks-cli -opencv-python==4.8.0.74 - # via imgaug -overrides==7.3.1 - # via jupyter-server -packaging==23.1 - # via - # build - # dask - # distributed - # docker - # gunicorn - # ipykernel - # jupyter-server - # jupyterlab - # jupyterlab-server - # marshmallow - # matplotlib - # mlflow - # nbconvert - # prefect - # pyproject-api - # qtconsole - # qtpy - # scikit-image - # sphinx - # tox -pandas==2.0.3 - # via - # dioptra - # dioptra (pyproject.toml) - # mlflow -pandocfilters==1.5.0 - # via nbconvert -parso==0.8.3 - # via jedi -partd==1.4.0 - # via dask -passlib==1.7.4 - # via - # dioptra - # dioptra (pyproject.toml) -pendulum==2.1.2 - # via prefect -pexpect==4.8.0 - # via ipython -pickleshare==0.7.5 - # via ipython -pillow==10.0.0 - # via - # dioptra (pyproject.toml) - # imageio - # imgaug - # matplotlib - # scikit-image -pip-tools==7.1.0 - # via dioptra (pyproject.toml) -platformdirs==3.10.0 - # via - # jupyter-core - # tox - # virtualenv -pluggy==1.2.0 - # via tox -prefect==1.4.1 - # via dioptra (pyproject.toml) -prometheus-client==0.17.1 - # via - # jupyter-server - # prometheus-flask-exporter 
-prometheus-flask-exporter==0.22.4 - # via mlflow -prompt-toolkit==3.0.39 - # via - # ipython - # jupyter-console -protobuf==4.23.4 - # via mlflow -psutil==5.9.5 - # via - # distributed - # ipykernel -ptyprocess==0.7.0 - # via - # pexpect - # terminado -pure-eval==0.2.2 - # via stack-data -pyarrow==12.0.1 - # via dioptra (pyproject.toml) -pycodestyle==2.11.0 - # via - # autopep8 - # dioptra (pyproject.toml) - # flake8 -pycparser==2.21 - # via cffi -pydocstyle==6.3.0 - # via dioptra (pyproject.toml) -pyflakes==3.1.0 - # via flake8 -pygls==1.0.2 - # via esbonio -pygments==2.15.1 - # via - # ipython - # jupyter-console - # nbconvert - # qtconsole - # rich - # sphinx -pyjwt==2.8.0 - # via databricks-cli -pyparsing==3.0.9 - # via matplotlib -pyproject-api==1.5.3 - # via tox -pyproject-hooks==1.0.0 - # via build -pyspellchecker==0.7.2 - # via esbonio -python-box==7.0.1 - # via prefect -python-dateutil==2.8.2 - # via - # arrow - # botocore - # croniter - # dioptra - # dioptra (pyproject.toml) - # jupyter-client - # kaggle - # matplotlib - # pandas - # pendulum - # prefect -python-json-logger==2.0.7 - # via jupyter-events -python-slugify==8.0.1 - # via - # cookiecutter - # kaggle - # prefect -pytoml==0.1.21 - # via dioptra (pyproject.toml) -pytz==2023.3 - # via - # flask-restx - # mlflow - # pandas - # prefect -pytzdata==2020.1 - # via pendulum -pywavelets==1.4.1 - # via scikit-image -pyyaml==6.0.1 - # via - # cookiecutter - # dask - # dioptra - # dioptra (pyproject.toml) - # distributed - # jupyter-events - # mlflow - # prefect -pyzmq==25.1.0 - # via - # ipykernel - # jupyter-client - # jupyter-console - # jupyter-server - # qtconsole -qtconsole==5.4.3 - # via jupyter -qtpy==2.3.1 - # via qtconsole -querystring-parser==1.2.4 - # via mlflow -redis==4.6.0 - # via - # dioptra - # dioptra (pyproject.toml) - # rq -referencing==0.30.0 - # via - # jsonschema - # jsonschema-specifications - # jupyter-events -requests==2.31.0 - # via - # cookiecutter - # databricks-cli - # dioptra 
- # dioptra (pyproject.toml) - # docker - # jupyterlab-server - # kaggle - # mlflow - # prefect - # sphinx -rfc3339-validator==0.1.4 - # via - # jsonschema - # jupyter-events -rfc3986-validator==0.1.1 - # via - # jsonschema - # jupyter-events -rich==13.5.2 - # via dioptra (pyproject.toml) -rpds-py==0.9.2 - # via - # jsonschema - # referencing -rq==1.15.1 - # via - # dioptra - # dioptra (pyproject.toml) -s3transfer==0.6.1 - # via boto3 -scikit-image==0.21.0 - # via imgaug -scikit-learn==1.0.2 - # via - # adversarial-robustness-toolbox - # dioptra (pyproject.toml) -scipy==1.11.1 - # via - # adversarial-robustness-toolbox - # dioptra - # dioptra (pyproject.toml) - # imgaug - # mlflow - # scikit-image - # scikit-learn -send2trash==1.8.2 - # via jupyter-server -shapely==2.0.1 - # via imgaug -six==1.16.0 - # via - # adversarial-robustness-toolbox - # asttokens - # bleach - # databricks-cli - # imgaug - # kaggle - # python-dateutil - # querystring-parser - # rfc3339-validator -smmap==5.0.0 - # via gitdb -sniffio==1.3.0 - # via anyio -snowballstemmer==2.2.0 - # via - # pydocstyle - # sphinx -sortedcontainers==2.4.0 - # via distributed -soupsieve==2.4.1 - # via beautifulsoup4 -sphinx==4.5.0 - # via - # dioptra (pyproject.toml) - # esbonio -sphinxcontrib-applehelp==1.0.4 - # via sphinx -sphinxcontrib-devhelp==1.0.2 - # via sphinx -sphinxcontrib-htmlhelp==2.0.1 - # via sphinx -sphinxcontrib-jsmath==1.0.1 - # via sphinx -sphinxcontrib-qthelp==1.0.3 - # via sphinx -sphinxcontrib-serializinghtml==1.1.5 - # via sphinx -sqlalchemy==1.4.49 - # via - # alembic - # dioptra - # dioptra (pyproject.toml) - # flask-sqlalchemy - # mlflow -sqlparse==0.4.4 - # via mlflow -stack-data==0.6.2 - # via ipython -structlog==23.1.0 - # via - # dioptra - # dioptra (pyproject.toml) -tabulate==0.9.0 - # via - # databricks-cli - # prefect -tblib==2.0.0 - # via distributed -terminado==0.17.1 - # via - # jupyter-server - # jupyter-server-terminals -text-unidecode==1.3 - # via python-slugify 
-threadpoolctl==3.2.0 - # via scikit-learn -tifffile==2023.7.18 - # via scikit-image -tinycss2==1.2.1 - # via nbconvert -toml==0.10.2 - # via - # build - # prefect -tomli==2.0.1 - # via - # autopep8 - # build - # dioptra (pyproject.toml) - # jupyterlab - # pip-tools - # pyproject-api - # pyproject-hooks - # tox -toolz==0.12.0 - # via - # dask - # distributed - # partd -tornado==6.3.2 - # via - # distributed - # ipykernel - # jupyter-client - # jupyter-server - # jupyterlab - # notebook - # terminado -tox==4.6.4 - # via dioptra (pyproject.toml) -tqdm==4.65.0 - # via - # adversarial-robustness-toolbox - # kaggle -traitlets==5.9.0 - # via - # comm - # ipykernel - # ipython - # ipywidgets - # jupyter-client - # jupyter-console - # jupyter-core - # jupyter-events - # jupyter-server - # jupyterlab - # matplotlib-inline - # nbclient - # nbconvert - # nbformat - # qtconsole -typeguard==3.0.2 - # via pygls -typing-extensions==4.7.1 - # via - # alembic - # async-lru - # cattrs - # dioptra - # dioptra (pyproject.toml) - # ipython - # typeguard -tzdata==2023.3 - # via pandas -universal-pathlib==0.0.24 - # via dioptra (pyproject.toml) -uri-template==1.3.0 - # via jsonschema -urllib3==1.26.16 - # via - # botocore - # databricks-cli - # distributed - # docker - # kaggle - # prefect - # requests -virtualenv==20.24.2 - # via tox -wcwidth==0.2.6 - # via prompt-toolkit -webcolors==1.13 - # via jsonschema -webencodings==0.5.1 - # via - # bleach - # tinycss2 -websocket-client==1.6.1 - # via - # docker - # jupyter-server -werkzeug==2.1.2 - # via - # dioptra - # dioptra (pyproject.toml) - # flask - # flask-accepts - # flask-restx -wheel==0.41.0 - # via - # dioptra (pyproject.toml) - # pip-tools -widgetsnbextension==4.0.8 - # via ipywidgets -wtforms[email]==3.0.1 - # via - # dioptra - # dioptra (pyproject.toml) - # flask-wtf -yarl==1.9.2 - # via aiohttp -zict==3.0.0 - # via distributed -zipp==3.16.2 - # via - # importlib-metadata - # importlib-resources - -# The following packages are 
considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/requirements/linux-x86_64-py3.9-requirements-dev-pytorch.txt b/requirements/linux-amd64-py3.9-requirements-dev-pytorch.txt similarity index 90% rename from requirements/linux-x86_64-py3.9-requirements-dev-pytorch.txt rename to requirements/linux-amd64-py3.9-requirements-dev-pytorch.txt index 03ea29577..a49a80c56 100644 --- a/requirements/linux-x86_64-py3.9-requirements-dev-pytorch.txt +++ b/requirements/linux-amd64-py3.9-requirements-dev-pytorch.txt @@ -2,16 +2,16 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -e file:.#egg=dioptra # via -r requirements-dev.in -absl-py==1.4.0 +absl-py==2.0.0 # via tensorboard -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via @@ -21,7 +21,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -33,13 +33,13 @@ antlr4-python3-runtime==4.9.3 # via # hydra-core # omegaconf -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via # black # esbonio -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -47,11 +47,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time 
-asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -63,7 +63,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -81,15 +81,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -103,11 +103,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -115,7 +115,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # black # cookiecutter @@ -138,11 +138,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -152,13 +152,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -168,9 +168,9 @@ detectron2==0.6+cpu ; sys_platform == "linux" and python_version == "3.9" and pl # via -r requirements-dev-pytorch.in distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -187,15 +187,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 
# via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -203,14 +204,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -225,11 +225,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -247,7 +243,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -255,7 +251,7 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning @@ -266,9 +262,9 @@ fvcore==0.1.5.post20221221 # via detectron2 gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -276,7 +272,7 @@ google-auth-oauthlib==1.0.0 # via tensorboard greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via tensorboard gunicorn==21.2.0 # via mlflow @@ -289,7 +285,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -299,6 +295,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -310,7 +307,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -318,19 +315,18 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector iopath==0.1.9 # via # detectron2 # fvcore -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # 
jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -338,7 +334,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -365,13 +361,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -383,7 +379,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -392,7 +388,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -407,7 +403,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -416,21 +412,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -442,7 +438,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -465,7 +461,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via # detectron2 # imgaug @@ -484,7 
+480,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -492,7 +488,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -502,7 +498,7 @@ mypy-extensions==1.0.0 # prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -512,17 +508,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -555,9 +551,9 @@ omegaconf==2.3.0 # via # detectron2 # hydra-core -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -583,9 +579,8 @@ packaging==23.1 # qtpy # scikit-image # sphinx - # torchmetrics # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -594,7 +589,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -608,7 +603,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # detectron2 # dioptra (pyproject.toml) @@ -618,16 +613,16 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox -portalocker==2.7.0 +portalocker==2.8.2 # via iopath prefect==1.4.1 # via dioptra (pyproject.toml) @@ -641,7 +636,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard @@ -655,7 +650,7 @@ ptyprocess==0.7.0 # terminado 
pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -663,7 +658,7 @@ pyasn1==0.5.0 # rsa pyasn1-modules==0.3.0 # via google-auth -pycocotools==2.0.6 +pycocotools==2.0.7 # via detectron2 pycodestyle==2.11.0 # via @@ -680,7 +675,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -690,17 +685,17 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via # matplotlib # pydot -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -726,7 +721,7 @@ pytoml==0.1.21 # via dioptra (pyproject.toml) pytorch-lightning==1.9.5 ; python_version == "3.9" # via -r requirements-dev-pytorch.in -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -750,30 +745,30 @@ pyyaml==6.0.1 # prefect # pytorch-lightning # yacs -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications # jupyter-events -regex==2023.6.3 +regex==2023.8.8 # via black requests==2.31.0 # via @@ -800,9 +795,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -812,7 +807,7 @@ rq==1.15.1 # dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -820,7 +815,7 @@ scikit-learn==1.0.2 # via # 
adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -839,13 +834,13 @@ six==1.16.0 # asttokens # bleach # databricks-cli - # google-auth # imgaug # kaggle # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 + # tensorboard +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -855,7 +850,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -896,7 +891,7 @@ tabulate==0.9.0 # prefect tblib==2.0.0 # via distributed -tensorboard==2.13.0 +tensorboard==2.14.1 # via detectron2 tensorboard-data-server==0.7.1 # via tensorboard @@ -912,7 +907,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -944,11 +939,11 @@ torch==1.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and pyt # torchvision torchaudio==0.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3+cpu ; (sys_platform == "win32" or sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -957,9 +952,9 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # detectron2 @@ -967,7 +962,7 @@ tqdm==4.65.0 # iopath # kaggle # pytorch-lightning -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # 
ipykernel @@ -986,7 +981,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -1000,7 +995,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -1010,11 +1005,10 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -1024,7 +1018,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -1036,12 +1030,11 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools - # tensorboard -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -1056,7 +1049,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/macos-x86_64-py3.9-requirements-dev-tensorflow.txt b/requirements/linux-amd64-py3.9-requirements-dev-tensorflow.txt similarity index 89% rename from requirements/macos-x86_64-py3.9-requirements-dev-tensorflow.txt rename to requirements/linux-amd64-py3.9-requirements-dev-tensorflow.txt index 481a31820..6bc18e360 100644 --- a/requirements/macos-x86_64-py3.9-requirements-dev-tensorflow.txt +++ b/requirements/linux-amd64-py3.9-requirements-dev-tensorflow.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-x86_64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in +# 
pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow-cpu -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -26,15 +26,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -appnope==0.1.3 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -42,13 +38,13 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data astunparse==1.6.3 # via tensorflow-cpu async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -60,7 +56,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -76,15 +72,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -98,11 +94,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -110,7 +106,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # 
dask @@ -131,11 +127,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -145,13 +141,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -159,9 +155,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -178,15 +174,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -194,14 +191,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -216,11 +212,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -240,7 +232,7 @@ flask-wtf==1.1.1 # dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow-cpu -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -248,7 +240,7 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib @@ -256,9 +248,9 @@ gast==0.4.0 # via 
tensorflow-cpu gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -268,7 +260,7 @@ google-pasta==0.2.0 # via tensorflow-cpu greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via # tensorboard # tensorflow-cpu @@ -283,7 +275,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -293,6 +285,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jax @@ -305,7 +298,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -313,15 +306,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -329,7 +321,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -337,7 +329,7 @@ itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow-cpu jedi==0.19.0 # via ipython @@ -358,13 +350,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -376,7 +368,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -385,7 +377,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 
+jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -400,7 +392,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -409,23 +401,23 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) keras==2.12.0 # via tensorflow-cpu -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -435,7 +427,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -458,7 +450,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -470,13 +462,13 @@ mdurl==0.1.2 # via markdown-it-py mistune==3.0.1 # via nbconvert -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # via jax mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -484,7 +476,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -492,7 +484,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -502,11 +494,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via @@ -541,13 +533,13 @@ oauthlib==3.2.2 # via # databricks-cli # 
requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug opt-einsum==3.3.0 # via # jax # tensorflow-cpu -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -572,7 +564,7 @@ packaging==23.1 # sphinx # tensorflow-cpu # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -581,7 +573,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -593,21 +585,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -621,7 +613,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard @@ -636,7 +628,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -657,7 +649,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -667,15 +659,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -699,7 +691,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -719,25 +711,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # 
jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -766,9 +758,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -778,7 +770,7 @@ rq==1.15.1 # dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -786,7 +778,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -807,7 +799,6 @@ six==1.16.0 # astunparse # bleach # databricks-cli - # google-auth # google-pasta # imgaug # kaggle @@ -815,7 +806,7 @@ six==1.16.0 # querystring-parser # rfc3339-validator # tensorflow-cpu -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -825,7 +816,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -872,7 +863,7 @@ tensorflow-cpu==2.12.1 ; (sys_platform == "linux" or sys_platform == "win32" or # via -r requirements-dev-tensorflow.in tensorflow-estimator==2.12.0 # via tensorflow-cpu -tensorflow-io-gcs-filesystem==0.32.0 +tensorflow-io-gcs-filesystem==0.34.0 # via tensorflow-cpu termcolor==2.3.0 # via tensorflow-cpu @@ -884,7 +875,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -907,7 +898,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ 
-916,13 +907,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -953,7 +944,7 @@ typing-extensions==4.5.0 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -963,11 +954,10 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -977,7 +967,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -989,13 +979,13 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # dioptra (pyproject.toml) # pip-tools # tensorboard -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wrapt==1.14.1 # via tensorflow-cpu @@ -1008,7 +998,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/linux-aarch64-py3.9-requirements-dev.txt b/requirements/linux-amd64-py3.9-requirements-dev.txt similarity index 89% rename from requirements/linux-aarch64-py3.9-requirements-dev.txt rename to requirements/linux-amd64-py3.9-requirements-dev.txt index 546b2b714..0ad502535 100644 --- a/requirements/linux-aarch64-py3.9-requirements-dev.txt +++ b/requirements/linux-amd64-py3.9-requirements-dev.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-aarch64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in 
+# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-amd64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -22,11 +22,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -34,11 +34,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -50,7 +50,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -66,15 +66,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -86,11 +86,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -98,7 +98,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -119,11 +119,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib 
cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -133,13 +133,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -147,9 +147,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -166,15 +166,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -182,14 +183,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -204,11 +204,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -226,7 +222,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -234,13 +230,13 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -253,7 +249,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -263,6 +259,7 @@ 
imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -273,7 +270,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -281,15 +278,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -297,7 +293,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -324,13 +320,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -342,7 +338,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -351,7 +347,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -366,7 +362,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -375,21 +371,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via 
ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -397,7 +393,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -418,7 +414,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -434,7 +430,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -442,7 +438,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -450,7 +446,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -460,17 +456,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -491,9 +487,9 @@ numpy==1.25.2 # tifffile oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -517,7 +513,7 @@ packaging==23.1 # scikit-image # sphinx # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -526,7 +522,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -538,21 +534,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 
+pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -566,7 +562,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -578,7 +574,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -593,7 +589,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -603,15 +599,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -635,7 +631,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -655,25 +651,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -698,9 +694,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -708,7 +704,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # 
via imgaug @@ -716,7 +712,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -740,7 +736,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -750,7 +746,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -797,7 +793,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -820,7 +816,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -829,13 +825,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -854,7 +850,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -865,7 +861,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -878,7 +874,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -888,7 +884,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -899,11 +895,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets 
wtforms[email]==3.0.1 # via @@ -914,7 +910,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/macos-x86_64-py3.9-requirements-dev-pytorch.txt b/requirements/linux-arm64-py3.9-requirements-dev-pytorch.txt similarity index 89% rename from requirements/macos-x86_64-py3.9-requirements-dev-pytorch.txt rename to requirements/linux-arm64-py3.9-requirements-dev-pytorch.txt index 862e512ad..4abbb397f 100644 --- a/requirements/macos-x86_64-py3.9-requirements-dev-pytorch.txt +++ b/requirements/linux-arm64-py3.9-requirements-dev-pytorch.txt @@ -2,14 +2,14 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-x86_64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-arm64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via @@ -19,7 +19,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -27,15 +27,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -appnope==0.1.3 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server 
argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -43,11 +39,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -59,7 +55,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -75,15 +71,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -95,11 +91,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -107,7 +103,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -128,11 +124,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -142,13 +138,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -156,9 +152,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -175,15 +171,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via 
stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -191,14 +188,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -213,11 +209,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -235,7 +227,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -243,14 +235,14 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -263,7 +255,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -273,6 +265,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -283,7 +276,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -291,15 +284,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -307,7 +299,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter 
isoduration==20.11.0 # via jsonschema @@ -334,13 +326,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -352,7 +344,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -361,7 +353,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -376,7 +368,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -385,21 +377,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -411,7 +403,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -432,7 +424,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -448,7 +440,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -456,7 +448,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # 
dioptra (pyproject.toml) @@ -464,7 +456,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -474,17 +466,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -508,9 +500,9 @@ numpy==1.25.2 # torchvision oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -535,9 +527,8 @@ packaging==23.1 # qtpy # scikit-image # sphinx - # torchmetrics # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -546,7 +537,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -558,7 +549,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio @@ -566,14 +557,14 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -587,7 +578,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -599,7 +590,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -614,7 +605,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ 
-624,15 +615,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -658,7 +649,7 @@ pytoml==0.1.21 # via dioptra (pyproject.toml) pytorch-lightning==1.9.5 ; python_version == "3.9" # via -r requirements-dev-pytorch.in -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -679,25 +670,25 @@ pyyaml==6.0.1 # mlflow # prefect # pytorch-lightning -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -723,9 +714,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -733,7 +724,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -741,7 +732,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -765,7 +756,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -775,7 +766,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -822,7 +813,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # 
via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -854,11 +845,11 @@ torch==1.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_pl # torchvision torchaudio==0.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -867,14 +858,14 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle # pytorch-lightning -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -893,7 +884,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -907,7 +898,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -920,7 +911,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -930,7 +921,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -941,11 +932,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -956,7 +947,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed 
-zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/linux-arm64-py3.9-requirements-dev-tensorflow.txt b/requirements/linux-arm64-py3.9-requirements-dev-tensorflow.txt new file mode 100644 index 000000000..aa4834782 --- /dev/null +++ b/requirements/linux-arm64-py3.9-requirements-dev-tensorflow.txt @@ -0,0 +1,1008 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-arm64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in +# +-e file:.#egg=dioptra + # via -r requirements-dev.in +absl-py==2.0.0 + # via + # tensorboard + # tensorflow-cpu-aws +adversarial-robustness-toolbox==1.16.0 + # via dioptra (pyproject.toml) +aiohttp==3.8.5 + # via dioptra (pyproject.toml) +aiosignal==1.3.1 + # via aiohttp +alabaster==0.7.13 + # via sphinx +alembic==1.12.0 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-migrate + # mlflow +aniso8601==9.0.1 + # via flask-restx +anyio==4.0.0 + # via jupyter-server +appdirs==1.4.4 + # via esbonio +argon2-cffi==23.1.0 + # via jupyter-server +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +arrow==1.2.3 + # via + # isoduration + # jinja2-time +asttokens==2.4.0 + # via stack-data +astunparse==1.6.3 + # via tensorflow-cpu-aws +async-lru==2.0.4 + # via jupyterlab +async-timeout==4.0.3 + # via + # aiohttp + # redis +attrs==23.1.0 + # via + # aiohttp + # cattrs + # flake8-bugbear + # jsonschema + # lsprotocol + # referencing +autopep8==2.0.4 + # via dioptra (pyproject.toml) +babel==2.12.1 + # via + # jupyterlab-server + # sphinx +backcall==0.2.0 + # via ipython +beautifulsoup4==4.12.2 + # via nbconvert +binaryornot==0.4.4 + # via cookiecutter +bleach==6.0.0 + # via + # kaggle + # nbconvert +boto3==1.28.57 + # via + # dioptra + # dioptra (pyproject.toml) +botocore==1.31.57 + # 
via + # boto3 + # s3transfer +build==1.0.3 + # via + # dioptra (pyproject.toml) + # pip-tools +cachetools==5.3.1 + # via + # google-auth + # tox +cattrs==23.1.2 + # via lsprotocol +certifi==2023.7.22 + # via + # kaggle + # requests +cffi==1.16.0 + # via + # argon2-cffi-bindings + # cryptography +chardet==5.2.0 + # via + # binaryornot + # tox +charset-normalizer==3.2.0 + # via + # aiohttp + # requests +click==8.1.7 + # via + # cookiecutter + # dask + # databricks-cli + # dioptra + # dioptra (pyproject.toml) + # distributed + # flask + # mlflow + # pip-tools + # prefect + # rq +cloudpickle==2.2.1 + # via + # dask + # distributed + # mlflow + # prefect +colorama==0.4.6 + # via tox +comm==0.1.4 + # via + # ipykernel + # ipywidgets +contourpy==1.1.1 + # via matplotlib +cookiecutter==2.1.1 + # via dioptra (pyproject.toml) +croniter==1.4.1 + # via prefect +cryptography==3.4.8 + # via dioptra (pyproject.toml) +cycler==0.11.0 + # via matplotlib +dask==2023.9.2 + # via + # distributed + # prefect +databricks-cli==0.17.8 + # via mlflow +debugpy==1.8.0 + # via ipykernel +decorator==5.1.1 + # via ipython +defusedxml==0.7.1 + # via nbconvert +distlib==0.3.7 + # via virtualenv +distributed==2023.9.2 + # via prefect +dnspython==2.4.2 + # via email-validator +docker==6.1.3 + # via + # mlflow + # prefect +docutils==0.17.1 + # via sphinx +email-validator==2.0.0.post2 + # via wtforms +entrypoints==0.4 + # via + # dioptra + # dioptra (pyproject.toml) + # mlflow +esbonio==0.16.1 + # via dioptra (pyproject.toml) +exceptiongroup==1.1.3 + # via + # anyio + # cattrs + # ipython +executing==1.2.0 + # via stack-data +fastjsonschema==2.18.0 + # via nbformat +filelock==3.12.4 + # via + # tox + # virtualenv +flake8==6.1.0 + # via + # dioptra (pyproject.toml) + # flake8-bugbear +flake8-bugbear==23.9.16 + # via dioptra (pyproject.toml) +flask==2.1.3 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-cors + # flask-migrate + # flask-restx + # flask-sqlalchemy + # flask-wtf + # mlflow + # 
prometheus-flask-exporter +flask-accepts==0.18.4 + # via + # dioptra + # dioptra (pyproject.toml) +flask-cors==4.0.0 + # via + # dioptra + # dioptra (pyproject.toml) +flask-migrate==4.0.5 + # via + # dioptra + # dioptra (pyproject.toml) +flask-restx==1.1.0 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-accepts +flask-sqlalchemy==2.5.1 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-migrate +flask-wtf==1.1.1 + # via + # dioptra + # dioptra (pyproject.toml) +flatbuffers==23.5.26 + # via tensorflow-cpu-aws +fonttools==4.42.1 + # via matplotlib +fqdn==1.5.1 + # via jsonschema +frozenlist==1.4.0 + # via + # aiohttp + # aiosignal +fsspec==2023.9.2 + # via + # dask + # universal-pathlib +gast==0.4.0 + # via tensorflow-cpu-aws +gitdb==4.0.10 + # via gitpython +gitpython==3.1.37 + # via mlflow +google-auth==2.23.2 + # via + # google-auth-oauthlib + # tensorboard +google-auth-oauthlib==1.0.0 + # via tensorboard +google-pasta==0.2.0 + # via tensorflow-cpu-aws +greenlet==2.0.2 + # via sqlalchemy +grpcio==1.58.0 + # via + # tensorboard + # tensorflow-cpu-aws +gunicorn==21.2.0 + # via mlflow +h5py==3.9.0 + # via tensorflow-cpu-aws +idna==3.4 + # via + # anyio + # email-validator + # jsonschema + # requests + # yarl +imageio==2.31.4 + # via + # imgaug + # scikit-image +imagesize==1.4.1 + # via sphinx +imgaug==0.4.0 + # via dioptra (pyproject.toml) +importlib-metadata==6.8.0 + # via + # build + # dask + # flask + # jax + # jupyter-client + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # markdown + # mlflow + # nbconvert + # sphinx + # typeguard +importlib-resources==6.1.0 + # via + # matplotlib + # prefect +injector==0.21.0 + # via + # dioptra + # dioptra (pyproject.toml) +ipykernel==6.25.2 + # via + # dioptra (pyproject.toml) + # jupyter + # jupyter-console + # jupyterlab + # qtconsole +ipython==8.15.0 + # via + # dioptra (pyproject.toml) + # ipykernel + # ipywidgets + # jupyter-console +ipython-genutils==0.2.0 + # via qtconsole +ipywidgets==8.1.1 + 
# via jupyter +isoduration==20.11.0 + # via jsonschema +itsdangerous==2.1.2 + # via + # flask + # flask-wtf +jax==0.4.16 + # via tensorflow-cpu-aws +jedi==0.19.0 + # via ipython +jinja2==3.1.2 + # via + # cookiecutter + # distributed + # flask + # jinja2-time + # jupyter-server + # jupyterlab + # jupyterlab-server + # nbconvert + # sphinx +jinja2-time==0.2.0 + # via cookiecutter +jmespath==1.0.1 + # via + # boto3 + # botocore +joblib==1.3.2 + # via scikit-learn +json5==0.9.14 + # via jupyterlab-server +jsonpointer==2.4 + # via jsonschema +jsonschema[format-nongpl]==4.19.1 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-restx + # jupyter-events + # jupyterlab-server + # nbformat +jsonschema-specifications==2023.7.1 + # via jsonschema +jupyter==1.0.0 + # via dioptra (pyproject.toml) +jupyter-client==8.3.1 + # via + # ipykernel + # jupyter-console + # jupyter-server + # nbclient + # qtconsole +jupyter-console==6.6.3 + # via jupyter +jupyter-core==5.3.2 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # jupyterlab + # nbclient + # nbconvert + # nbformat + # qtconsole +jupyter-events==0.7.0 + # via jupyter-server +jupyter-lsp==2.2.0 + # via jupyterlab +jupyter-server==2.7.3 + # via + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # notebook + # notebook-shim +jupyter-server-terminals==0.4.4 + # via jupyter-server +jupyterlab==4.0.6 + # via + # dioptra (pyproject.toml) + # notebook +jupyterlab-pygments==0.2.2 + # via nbconvert +jupyterlab-server==2.25.0 + # via + # jupyterlab + # notebook +jupyterlab-widgets==3.0.9 + # via ipywidgets +kaggle==1.5.16 + # via dioptra (pyproject.toml) +keras==2.12.0 + # via tensorflow-cpu-aws +kiwisolver==1.4.5 + # via matplotlib +lazy-loader==0.3 + # via scikit-image +libclang==16.0.6 + # via tensorflow-cpu-aws +locket==1.0.0 + # via + # distributed + # partd +lsprotocol==2023.0.0b1 + # via pygls +mako==1.2.4 + # via alembic +markdown==3.4.4 + # via tensorboard +markdown-it-py==3.0.0 + # 
via rich +markupsafe==2.1.3 + # via + # jinja2 + # mako + # nbconvert + # wtforms +marshmallow==3.20.1 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-accepts + # marshmallow-oneofschema + # prefect +marshmallow-oneofschema==3.0.1 + # via prefect +matplotlib==3.8.0 + # via imgaug +matplotlib-inline==0.1.6 + # via + # ipykernel + # ipython +mccabe==0.7.0 + # via flake8 +mdurl==0.1.2 + # via markdown-it-py +mistune==3.0.1 + # via nbconvert +ml-dtypes==0.3.1 + # via jax +mlflow==1.27.0 + # via + # dioptra + # dioptra (pyproject.toml) +msgpack==1.0.7 + # via + # distributed + # prefect +multidict==6.0.4 + # via + # aiohttp + # yarl +multimethod==1.10 + # via + # dioptra + # dioptra (pyproject.toml) +mypy-extensions==1.0.0 + # via prefect +nbclient==0.8.0 + # via nbconvert +nbconvert==7.8.0 + # via + # dioptra (pyproject.toml) + # jupyter + # jupyter-server +nbformat==5.9.2 + # via + # jupyter-server + # nbclient + # nbconvert +nest-asyncio==1.5.8 + # via ipykernel +networkx==3.1 + # via scikit-image +notebook==7.0.4 + # via jupyter +notebook-shim==0.2.3 + # via + # jupyterlab + # notebook +numpy==1.24.3 + # via + # adversarial-robustness-toolbox + # contourpy + # dioptra + # dioptra (pyproject.toml) + # h5py + # imageio + # imgaug + # jax + # matplotlib + # ml-dtypes + # mlflow + # opencv-python + # opt-einsum + # pandas + # pyarrow + # pywavelets + # scikit-image + # scikit-learn + # scipy + # shapely + # tensorboard + # tensorflow-cpu-aws + # tifffile +oauthlib==3.2.2 + # via + # databricks-cli + # requests-oauthlib +opencv-python==4.8.1.78 + # via imgaug +opt-einsum==3.3.0 + # via + # jax + # tensorflow-cpu-aws +overrides==7.4.0 + # via jupyter-server +packaging==23.1 + # via + # build + # dask + # distributed + # docker + # gunicorn + # ipykernel + # jupyter-server + # jupyterlab + # jupyterlab-server + # marshmallow + # matplotlib + # mlflow + # nbconvert + # prefect + # pyproject-api + # qtconsole + # qtpy + # scikit-image + # sphinx + # 
tensorflow-cpu-aws + # tox +pandas==2.1.1 + # via + # dioptra + # dioptra (pyproject.toml) + # mlflow +pandocfilters==1.5.0 + # via nbconvert +parso==0.8.3 + # via jedi +partd==1.4.1 + # via dask +passlib==1.7.4 + # via + # dioptra + # dioptra (pyproject.toml) +pendulum==2.1.2 + # via prefect +pexpect==4.8.0 + # via ipython +pickleshare==0.7.5 + # via ipython +pillow==10.0.1 + # via + # dioptra (pyproject.toml) + # imageio + # imgaug + # matplotlib + # scikit-image +pip-tools==7.3.0 + # via dioptra (pyproject.toml) +platformdirs==3.10.0 + # via + # jupyter-core + # tox + # virtualenv +pluggy==1.3.0 + # via tox +prefect==1.4.1 + # via dioptra (pyproject.toml) +prometheus-client==0.17.1 + # via + # jupyter-server + # prometheus-flask-exporter +prometheus-flask-exporter==0.22.4 + # via mlflow +prompt-toolkit==3.0.39 + # via + # ipython + # jupyter-console +protobuf==4.24.3 + # via + # mlflow + # tensorboard + # tensorflow-cpu-aws +psutil==5.9.5 + # via + # distributed + # ipykernel +ptyprocess==0.7.0 + # via + # pexpect + # terminado +pure-eval==0.2.2 + # via stack-data +pyarrow==13.0.0 + # via dioptra (pyproject.toml) +pyasn1==0.5.0 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 + # via google-auth +pycodestyle==2.11.0 + # via + # autopep8 + # dioptra (pyproject.toml) + # flake8 +pycparser==2.21 + # via cffi +pydocstyle==6.3.0 + # via dioptra (pyproject.toml) +pyflakes==3.1.0 + # via flake8 +pygls==1.0.2 + # via esbonio +pygments==2.16.1 + # via + # ipython + # jupyter-console + # nbconvert + # qtconsole + # rich + # sphinx +pyjwt==2.8.0 + # via databricks-cli +pyparsing==3.1.1 + # via matplotlib +pyproject-api==1.6.1 + # via tox +pyproject-hooks==1.0.0 + # via build +pyspellchecker==0.7.2 + # via esbonio +python-box==7.1.1 + # via prefect +python-dateutil==2.8.2 + # via + # arrow + # botocore + # croniter + # dioptra + # dioptra (pyproject.toml) + # jupyter-client + # kaggle + # matplotlib + # pandas + # pendulum + # prefect +python-json-logger==2.0.7 + # 
via jupyter-events +python-slugify==8.0.1 + # via + # cookiecutter + # kaggle + # prefect +pytoml==0.1.21 + # via dioptra (pyproject.toml) +pytz==2023.3.post1 + # via + # flask-restx + # mlflow + # pandas + # prefect +pytzdata==2020.1 + # via pendulum +pywavelets==1.4.1 + # via scikit-image +pyyaml==6.0.1 + # via + # cookiecutter + # dask + # dioptra + # dioptra (pyproject.toml) + # distributed + # jupyter-events + # mlflow + # prefect +pyzmq==25.1.1 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # qtconsole +qtconsole==5.4.4 + # via jupyter +qtpy==2.4.0 + # via qtconsole +querystring-parser==1.2.4 + # via mlflow +redis==5.0.1 + # via + # dioptra + # dioptra (pyproject.toml) + # rq +referencing==0.30.2 + # via + # jsonschema + # jsonschema-specifications + # jupyter-events +requests==2.31.0 + # via + # cookiecutter + # databricks-cli + # dioptra + # dioptra (pyproject.toml) + # docker + # jupyterlab-server + # kaggle + # mlflow + # prefect + # requests-oauthlib + # sphinx + # tensorboard +requests-oauthlib==1.3.1 + # via google-auth-oauthlib +rfc3339-validator==0.1.4 + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 + # via + # jsonschema + # jupyter-events +rich==13.5.3 + # via dioptra (pyproject.toml) +rpds-py==0.10.3 + # via + # jsonschema + # referencing +rq==1.15.1 + # via + # dioptra + # dioptra (pyproject.toml) +rsa==4.9 + # via google-auth +s3transfer==0.7.0 + # via boto3 +scikit-image==0.21.0 + # via imgaug +scikit-learn==1.0.2 + # via + # adversarial-robustness-toolbox + # dioptra (pyproject.toml) +scipy==1.11.3 + # via + # adversarial-robustness-toolbox + # dioptra + # dioptra (pyproject.toml) + # imgaug + # jax + # mlflow + # scikit-image + # scikit-learn +send2trash==1.8.2 + # via jupyter-server +shapely==2.0.1 + # via imgaug +six==1.16.0 + # via + # adversarial-robustness-toolbox + # asttokens + # astunparse + # bleach + # databricks-cli + # google-pasta + # imgaug + # kaggle + # python-dateutil + # 
querystring-parser + # rfc3339-validator + # tensorflow-cpu-aws +smmap==5.0.1 + # via gitdb +sniffio==1.3.0 + # via anyio +snowballstemmer==2.2.0 + # via + # pydocstyle + # sphinx +sortedcontainers==2.4.0 + # via distributed +soupsieve==2.5 + # via beautifulsoup4 +sphinx==4.5.0 + # via + # dioptra (pyproject.toml) + # esbonio +sphinxcontrib-applehelp==1.0.4 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.1 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sqlalchemy==1.4.49 + # via + # alembic + # dioptra + # dioptra (pyproject.toml) + # flask-sqlalchemy + # mlflow +sqlparse==0.4.4 + # via mlflow +stack-data==0.6.2 + # via ipython +structlog==23.1.0 + # via + # dioptra + # dioptra (pyproject.toml) +tabulate==0.9.0 + # via + # databricks-cli + # prefect +tblib==2.0.0 + # via distributed +tensorboard==2.12.3 + # via tensorflow-cpu-aws +tensorboard-data-server==0.7.1 + # via tensorboard +tensorflow-cpu-aws==2.12.1 ; sys_platform == "linux" and (platform_machine == "aarch64" or platform_machine == "arm64") + # via -r requirements-dev-tensorflow.in +tensorflow-estimator==2.12.0 + # via tensorflow-cpu-aws +tensorflow-io-gcs-filesystem==0.34.0 + # via tensorflow-cpu-aws +termcolor==2.3.0 + # via tensorflow-cpu-aws +terminado==0.17.1 + # via + # jupyter-server + # jupyter-server-terminals +text-unidecode==1.3 + # via python-slugify +threadpoolctl==3.2.0 + # via scikit-learn +tifffile==2023.9.26 + # via scikit-image +tinycss2==1.2.1 + # via nbconvert +toml==0.10.2 + # via + # build + # prefect +tomli==2.0.1 + # via + # autopep8 + # build + # dioptra (pyproject.toml) + # jupyterlab + # pip-tools + # pyproject-api + # pyproject-hooks + # tox +toolz==0.12.0 + # via + # dask + # distributed + # partd +tornado==6.3.3 + # via + # distributed + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # notebook + # terminado 
+tox==4.11.3 + # via dioptra (pyproject.toml) +tqdm==4.66.1 + # via + # adversarial-robustness-toolbox + # kaggle +traitlets==5.10.1 + # via + # comm + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-console + # jupyter-core + # jupyter-events + # jupyter-server + # jupyterlab + # matplotlib-inline + # nbclient + # nbconvert + # nbformat + # qtconsole +typeguard==3.0.2 + # via pygls +typing-extensions==4.5.0 + # via + # alembic + # async-lru + # cattrs + # dioptra + # dioptra (pyproject.toml) + # ipython + # tensorflow-cpu-aws + # typeguard +tzdata==2023.3 + # via pandas +universal-pathlib==0.1.3 + # via dioptra (pyproject.toml) +uri-template==1.3.0 + # via jsonschema +urllib3==1.26.16 + # via + # botocore + # databricks-cli + # distributed + # docker + # kaggle + # prefect + # requests +virtualenv==20.24.5 + # via tox +wcwidth==0.2.6 + # via prompt-toolkit +webcolors==1.13 + # via jsonschema +webencodings==0.5.1 + # via + # bleach + # tinycss2 +websocket-client==1.6.3 + # via + # docker + # jupyter-server +werkzeug==2.1.2 + # via + # dioptra + # dioptra (pyproject.toml) + # flask + # flask-accepts + # flask-restx + # tensorboard +wheel==0.41.2 + # via + # astunparse + # dioptra (pyproject.toml) + # pip-tools + # tensorboard +widgetsnbextension==4.0.9 + # via ipywidgets +wrapt==1.14.1 + # via tensorflow-cpu-aws +wtforms[email]==3.0.1 + # via + # dioptra + # dioptra (pyproject.toml) + # flask-wtf +yarl==1.9.2 + # via aiohttp +zict==3.0.0 + # via distributed +zipp==3.17.0 + # via + # importlib-metadata + # importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/requirements/macos-x86_64-py3.9-requirements-dev.txt b/requirements/linux-arm64-py3.9-requirements-dev.txt similarity index 89% rename from requirements/macos-x86_64-py3.9-requirements-dev.txt rename to requirements/linux-arm64-py3.9-requirements-dev.txt index 6892d37fa..b3df11967 100644 --- 
a/requirements/macos-x86_64-py3.9-requirements-dev.txt +++ b/requirements/linux-arm64-py3.9-requirements-dev.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-x86_64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-arm64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -22,15 +22,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -appnope==0.1.3 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -38,11 +34,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -54,7 +50,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -70,15 +66,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -90,11 +86,11 @@ certifi==2023.7.22 # via # kaggle # 
requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -102,7 +98,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -123,11 +119,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -137,13 +133,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -151,9 +147,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -170,15 +166,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -186,14 +183,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -208,11 +204,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -230,7 +222,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 
+fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -238,13 +230,13 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -257,7 +249,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -267,6 +259,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -277,7 +270,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -285,15 +278,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -301,7 +293,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -328,13 +320,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -346,7 +338,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -355,7 +347,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -370,7 +362,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab 
-jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -379,21 +371,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -401,7 +393,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -422,7 +414,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -438,7 +430,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -446,7 +438,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -454,7 +446,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -464,17 +456,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -495,9 +487,9 @@ numpy==1.25.2 # tifffile oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -521,7 +513,7 @@ packaging==23.1 # 
scikit-image # sphinx # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -530,7 +522,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -542,21 +534,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -570,7 +562,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -582,7 +574,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -597,7 +589,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -607,15 +599,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -639,7 +631,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -659,25 +651,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra 
(pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -702,9 +694,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -712,7 +704,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -720,7 +712,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -744,7 +736,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -754,7 +746,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -801,7 +793,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -824,7 +816,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -833,13 +825,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -858,7 +850,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -869,7 +861,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -882,7 +874,7 @@ urllib3==1.26.16 # kaggle # prefect # 
requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -892,7 +884,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -903,11 +895,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -918,7 +910,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/linux-aarch64-py3.9-requirements-dev-pytorch.txt b/requirements/macos-amd64-py3.9-requirements-dev-pytorch.txt similarity index 90% rename from requirements/linux-aarch64-py3.9-requirements-dev-pytorch.txt rename to requirements/macos-amd64-py3.9-requirements-dev-pytorch.txt index 6b3d292b4..08753cd6b 100644 --- a/requirements/linux-aarch64-py3.9-requirements-dev-pytorch.txt +++ b/requirements/macos-amd64-py3.9-requirements-dev-pytorch.txt @@ -2,14 +2,14 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-aarch64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-amd64-py3.9-requirements-dev-pytorch.txt pyproject.toml requirements-dev-pytorch.in requirements-dev.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra 
(pyproject.toml) aiohttp==3.8.5 # via @@ -19,7 +19,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -27,11 +27,15 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +appnope==0.1.3 + # via + # ipykernel + # ipython +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -39,11 +43,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -55,7 +59,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -71,15 +75,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -91,11 +95,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -103,7 +107,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -124,11 +128,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -138,13 +142,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via 
ipykernel decorator==5.1.1 # via ipython @@ -152,9 +156,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -171,15 +175,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -187,14 +192,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -209,11 +213,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -231,7 +231,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -239,14 +239,14 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -259,7 +259,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -269,6 +269,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -279,7 +280,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ 
-287,15 +288,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -303,7 +303,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -330,13 +330,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -348,7 +348,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -357,7 +357,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -372,7 +372,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -381,21 +381,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -407,7 +407,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via 
alembic @@ -428,7 +428,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -444,7 +444,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -452,7 +452,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -460,7 +460,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -470,17 +470,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -504,9 +504,9 @@ numpy==1.25.2 # torchvision oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -531,9 +531,8 @@ packaging==23.1 # qtpy # scikit-image # sphinx - # torchmetrics # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -542,7 +541,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -554,7 +553,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio @@ -562,14 +561,14 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -583,7 +582,7 @@ 
prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -595,7 +594,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -610,7 +609,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -620,15 +619,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -654,7 +653,7 @@ pytoml==0.1.21 # via dioptra (pyproject.toml) pytorch-lightning==1.9.5 ; python_version == "3.9" # via -r requirements-dev-pytorch.in -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -675,25 +674,25 @@ pyyaml==6.0.1 # mlflow # prefect # pytorch-lightning -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -719,9 +718,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -729,7 +728,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -737,7 +736,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # 
adversarial-robustness-toolbox # dioptra @@ -761,7 +760,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -771,7 +770,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -818,7 +817,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -850,11 +849,11 @@ torch==1.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_pl # torchvision torchaudio==0.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -863,14 +862,14 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle # pytorch-lightning -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -889,7 +888,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -903,7 +902,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -916,7 +915,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -926,7 +925,7 @@ 
webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -937,11 +936,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -952,7 +951,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/linux-x86_64-py3.9-requirements-dev-tensorflow.txt b/requirements/macos-amd64-py3.9-requirements-dev-tensorflow.txt similarity index 89% rename from requirements/linux-x86_64-py3.9-requirements-dev-tensorflow.txt rename to requirements/macos-amd64-py3.9-requirements-dev-tensorflow.txt index b0c13284f..3f8fb9916 100644 --- a/requirements/linux-x86_64-py3.9-requirements-dev-tensorflow.txt +++ b/requirements/macos-amd64-py3.9-requirements-dev-tensorflow.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-amd64-py3.9-requirements-dev-tensorflow.txt pyproject.toml requirements-dev-tensorflow.in requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow-cpu -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra 
# dioptra (pyproject.toml) @@ -26,11 +26,15 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +appnope==0.1.3 + # via + # ipykernel + # ipython +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -38,13 +42,13 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data astunparse==1.6.3 # via tensorflow-cpu async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -56,7 +60,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -72,15 +76,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -94,11 +98,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -106,7 +110,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -127,11 +131,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -141,13 +145,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -155,9 +159,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv 
-distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -174,15 +178,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -190,14 +195,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -212,11 +216,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -236,7 +236,7 @@ flask-wtf==1.1.1 # dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow-cpu -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -244,7 +244,7 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib @@ -252,9 +252,9 @@ gast==0.4.0 # via tensorflow-cpu gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -264,7 +264,7 @@ google-pasta==0.2.0 # via tensorflow-cpu greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via # tensorboard # tensorflow-cpu @@ -279,7 +279,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -289,6 +289,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jax @@ -301,7 +302,7 @@ importlib-metadata==6.8.0 
# nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -309,15 +310,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -325,7 +325,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -333,7 +333,7 @@ itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow-cpu jedi==0.19.0 # via ipython @@ -354,13 +354,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -372,7 +372,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -381,7 +381,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -396,7 +396,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -405,23 +405,23 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # 
via dioptra (pyproject.toml) keras==2.12.0 # via tensorflow-cpu -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -431,7 +431,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -454,7 +454,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -466,13 +466,13 @@ mdurl==0.1.2 # via markdown-it-py mistune==3.0.1 # via nbconvert -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # via jax mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -480,7 +480,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -488,7 +488,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -498,11 +498,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via @@ -537,13 +537,13 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug opt-einsum==3.3.0 # via # jax # tensorflow-cpu -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -568,7 +568,7 @@ packaging==23.1 # sphinx # tensorflow-cpu # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -577,7 +577,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -589,21 +589,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # 
imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -617,7 +617,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard @@ -632,7 +632,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -653,7 +653,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -663,15 +663,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -695,7 +695,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -715,25 +715,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -762,9 +762,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -774,7 +774,7 @@ rq==1.15.1 # dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 
+s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -782,7 +782,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -803,7 +803,6 @@ six==1.16.0 # astunparse # bleach # databricks-cli - # google-auth # google-pasta # imgaug # kaggle @@ -811,7 +810,7 @@ six==1.16.0 # querystring-parser # rfc3339-validator # tensorflow-cpu -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -821,7 +820,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -868,7 +867,7 @@ tensorflow-cpu==2.12.1 ; (sys_platform == "linux" or sys_platform == "win32" or # via -r requirements-dev-tensorflow.in tensorflow-estimator==2.12.0 # via tensorflow-cpu -tensorflow-io-gcs-filesystem==0.32.0 +tensorflow-io-gcs-filesystem==0.34.0 # via tensorflow-cpu termcolor==2.3.0 # via tensorflow-cpu @@ -880,7 +879,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -903,7 +902,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -912,13 +911,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -949,7 +948,7 @@ typing-extensions==4.5.0 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -959,11 +958,10 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox 
wcwidth==0.2.6 # via prompt-toolkit @@ -973,7 +971,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -985,13 +983,13 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # dioptra (pyproject.toml) # pip-tools # tensorboard -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wrapt==1.14.1 # via tensorflow-cpu @@ -1004,7 +1002,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/linux-x86_64-py3.9-requirements-dev.txt b/requirements/macos-amd64-py3.9-requirements-dev.txt similarity index 89% rename from requirements/linux-x86_64-py3.9-requirements-dev.txt rename to requirements/macos-amd64-py3.9-requirements-dev.txt index 1a7c57ece..9d0c411a9 100644 --- a/requirements/linux-x86_64-py3.9-requirements-dev.txt +++ b/requirements/macos-amd64-py3.9-requirements-dev.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/linux-x86_64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file=venvs/macos-amd64-py3.9-requirements-dev.txt pyproject.toml requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -22,11 +22,15 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 
+anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +appnope==0.1.3 + # via + # ipykernel + # ipython +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -34,11 +38,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -50,7 +54,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -66,15 +70,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -86,11 +90,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -98,7 +102,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -119,11 +123,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -133,13 +137,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -147,9 +151,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -166,15 +170,16 @@ 
entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -182,14 +187,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -204,11 +208,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -226,7 +226,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -234,13 +234,13 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -253,7 +253,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -263,6 +263,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -273,7 +274,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -281,15 +282,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -297,7 +297,7 @@ 
ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -324,13 +324,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -342,7 +342,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -351,7 +351,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -366,7 +366,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -375,21 +375,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -397,7 +397,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -418,7 +418,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -434,7 +434,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # 
prefect @@ -442,7 +442,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -450,7 +450,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -460,17 +460,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -491,9 +491,9 @@ numpy==1.25.2 # tifffile oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -517,7 +517,7 @@ packaging==23.1 # scikit-image # sphinx # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -526,7 +526,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -538,21 +538,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -566,7 +566,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -578,7 +578,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -593,7 +593,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 
+pygments==2.16.1 # via # ipython # jupyter-console @@ -603,15 +603,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -635,7 +635,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -655,25 +655,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -698,9 +698,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -708,7 +708,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -716,7 +716,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -740,7 +740,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -750,7 +750,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -797,7 +797,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via 
scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -820,7 +820,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -829,13 +829,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -854,7 +854,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -865,7 +865,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -878,7 +878,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -888,7 +888,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -899,11 +899,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -914,7 +914,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/macos-arm64-py3.9-requirements-dev-pytorch.txt b/requirements/macos-arm64-py3.9-requirements-dev-pytorch.txt index acf537f61..25983ba24 100644 --- a/requirements/macos-arm64-py3.9-requirements-dev-pytorch.txt +++ b/requirements/macos-arm64-py3.9-requirements-dev-pytorch.txt @@ -9,7 +9,7 @@ -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 
+adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via @@ -19,7 +19,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -27,7 +27,7 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio @@ -35,7 +35,7 @@ appnope==0.1.3 # via # ipykernel # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -43,11 +43,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -59,7 +59,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -75,15 +75,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -95,11 +95,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -107,7 +107,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -128,11 +128,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -142,13 +142,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 
+databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -156,9 +156,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -175,15 +175,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -191,14 +192,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -213,11 +213,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -235,7 +231,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -243,14 +239,14 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow gunicorn==21.2.0 # via mlflow @@ -261,7 +257,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -271,6 +267,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -281,7 +278,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard 
-importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -289,15 +286,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -305,7 +301,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -332,13 +328,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -350,7 +346,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -359,7 +355,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -374,7 +370,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -383,21 +379,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -409,7 +405,7 @@ locket==1.0.0 # via # 
distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -430,7 +426,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -446,7 +442,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -454,7 +450,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -462,7 +458,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -472,17 +468,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -506,9 +502,9 @@ numpy==1.25.2 # torchvision oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -533,9 +529,8 @@ packaging==23.1 # qtpy # scikit-image # sphinx - # torchmetrics # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -544,7 +539,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -556,7 +551,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio @@ -564,14 +559,14 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 
+pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -585,7 +580,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -597,7 +592,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -612,7 +607,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -622,15 +617,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -656,7 +651,7 @@ pytoml==0.1.21 # via dioptra (pyproject.toml) pytorch-lightning==1.9.5 ; python_version == "3.9" # via -r requirements-dev-pytorch.in -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -677,25 +672,25 @@ pyyaml==6.0.1 # mlflow # prefect # pytorch-lightning -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -721,9 +716,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -731,7 +726,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -739,7 +734,7 @@ scikit-learn==1.0.2 # via # 
adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -763,7 +758,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -773,7 +768,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -820,7 +815,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -852,11 +847,11 @@ torch==1.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_pl # torchvision torchaudio==0.10.2 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3 ; python_version == "3.9" and (sys_platform == "darwin" or (sys_platform == "linux" and platform_machine == "aarch64")) # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -865,14 +860,14 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle # pytorch-lightning -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -891,7 +886,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -905,7 +900,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -918,7 +913,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 
+virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -928,7 +923,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -939,11 +934,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -954,7 +949,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/macos-arm64-py3.9-requirements-dev-tensorflow.txt b/requirements/macos-arm64-py3.9-requirements-dev-tensorflow.txt index 2428aeca3..3ba9c0f96 100644 --- a/requirements/macos-arm64-py3.9-requirements-dev-tensorflow.txt +++ b/requirements/macos-arm64-py3.9-requirements-dev-tensorflow.txt @@ -6,11 +6,11 @@ # -e file:.#egg=dioptra # via -r requirements-dev.in -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow-macos -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -26,7 +26,7 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio @@ -34,7 +34,7 @@ appnope==0.1.3 # via # ipykernel # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -42,13 +42,13 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data astunparse==1.6.3 # via tensorflow-macos async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -60,7 +60,7 @@ 
attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -76,15 +76,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -98,11 +98,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -110,7 +110,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -131,11 +131,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -145,13 +145,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -159,9 +159,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -178,15 +178,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -194,14 +195,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # 
via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -216,11 +216,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -240,7 +236,7 @@ flask-wtf==1.1.1 # dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow-macos -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -248,7 +244,7 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib @@ -256,9 +252,9 @@ gast==0.4.0 # via tensorflow-macos gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -266,7 +262,7 @@ google-auth-oauthlib==1.0.0 # via tensorboard google-pasta==0.2.0 # via tensorflow-macos -grpcio==1.56.2 +grpcio==1.58.0 # via # tensorboard # tensorflow-macos @@ -281,7 +277,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -291,6 +287,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jax @@ -303,7 +300,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -311,15 +308,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -327,7 +323,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ 
-335,7 +331,7 @@ itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow-macos jedi==0.19.0 # via ipython @@ -356,13 +352,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -374,7 +370,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -383,7 +379,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -398,7 +394,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -407,23 +403,23 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) keras==2.12.0 # via tensorflow-macos -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -433,7 +429,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -456,7 +452,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -468,13 +464,13 @@ mdurl==0.1.2 # via markdown-it-py mistune==3.0.1 # via nbconvert -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # 
via jax mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -482,7 +478,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -490,7 +486,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -500,11 +496,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via @@ -539,13 +535,13 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug opt-einsum==3.3.0 # via # jax # tensorflow-macos -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -570,7 +566,7 @@ packaging==23.1 # sphinx # tensorflow-macos # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -579,7 +575,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -591,21 +587,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -619,7 +615,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # tensorboard @@ -634,7 +630,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -655,7 +651,7 @@ pyflakes==3.1.0 # 
via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -665,15 +661,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -697,7 +693,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -717,25 +713,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -764,9 +760,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -776,7 +772,7 @@ rq==1.15.1 # dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -784,7 +780,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -805,7 +801,6 @@ six==1.16.0 # astunparse # bleach # databricks-cli - # google-auth # google-pasta # imgaug # kaggle @@ -814,7 +809,7 @@ six==1.16.0 # rfc3339-validator # tensorflow-macos # tensorflow-metal -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -824,7 +819,7 @@ snowballstemmer==2.2.0 # sphinx 
sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -883,7 +878,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -906,7 +901,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -915,13 +910,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -940,7 +935,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -952,7 +947,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -962,11 +957,10 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -976,7 +970,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -988,14 +982,14 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # dioptra (pyproject.toml) # pip-tools # tensorboard # tensorflow-metal -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wrapt==1.14.1 # via tensorflow-macos @@ -1008,7 +1002,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/macos-arm64-py3.9-requirements-dev.txt 
b/requirements/macos-arm64-py3.9-requirements-dev.txt index 1028b3f76..96de8e281 100644 --- a/requirements/macos-arm64-py3.9-requirements-dev.txt +++ b/requirements/macos-arm64-py3.9-requirements-dev.txt @@ -6,7 +6,7 @@ # -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -22,7 +22,7 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio @@ -30,7 +30,7 @@ appnope==0.1.3 # via # ipykernel # ipython -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -38,11 +38,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -54,7 +54,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -70,15 +70,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -90,11 +90,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -102,7 +102,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -123,11 +123,11 @@ cloudpickle==2.2.1 # prefect colorama==0.4.6 # via tox -comm==0.1.3 +comm==0.1.4 # 
via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -137,13 +137,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -151,9 +151,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -170,15 +170,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -186,14 +187,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -208,11 +208,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -230,7 +226,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -238,13 +234,13 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow gunicorn==21.2.0 # via mlflow @@ -255,7 +251,7 @@ idna==3.4 # jsonschema # requests # yarl 
-imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -265,6 +261,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -275,7 +272,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -283,15 +280,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -299,7 +295,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -326,13 +322,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -344,7 +340,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -353,7 +349,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -368,7 +364,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -377,21 +373,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # 
jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -399,7 +395,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -420,7 +416,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -436,7 +432,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -444,7 +440,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -452,7 +448,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -462,17 +458,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -493,9 +489,9 @@ numpy==1.25.2 # tifffile oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -519,7 +515,7 @@ packaging==23.1 # scikit-image # sphinx # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -528,7 +524,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -540,21 +536,21 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra 
(pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -568,7 +564,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -580,7 +576,7 @@ ptyprocess==0.7.0 # terminado pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -595,7 +591,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -605,15 +601,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -637,7 +633,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -657,25 +653,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -700,9 +696,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -710,7 +706,7 @@ rq==1.15.1 # via # dioptra # dioptra 
(pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -718,7 +714,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -742,7 +738,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -752,7 +748,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -799,7 +795,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -822,7 +818,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -831,13 +827,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -856,7 +852,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -867,7 +863,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -880,7 +876,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox wcwidth==0.2.6 # via prompt-toolkit @@ -890,7 +886,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -901,11 +897,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra 
(pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -916,7 +912,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/win-x86_64-py3.9-requirements-dev-pytorch.txt b/requirements/win-amd64-py3.9-requirements-dev-pytorch.txt similarity index 90% rename from requirements/win-x86_64-py3.9-requirements-dev-pytorch.txt rename to requirements/win-amd64-py3.9-requirements-dev-pytorch.txt index 264c4cc1e..21e4e34f2 100644 --- a/requirements/win-x86_64-py3.9-requirements-dev-pytorch.txt +++ b/requirements/win-amd64-py3.9-requirements-dev-pytorch.txt @@ -2,14 +2,14 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-x86_64-py3.9-requirements-dev-pytorch.txt' pyproject.toml requirements-dev-pytorch.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-amd64-py3.9-requirements-dev-pytorch.txt' pyproject.toml requirements-dev-pytorch.in requirements-dev.in # --find-links https://download.pytorch.org/whl/cpu/torch_stable.html --find-links https://dl.fbaipublicfiles.com/detectron2/wheels/cpu/torch1.10/index.html -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via @@ -19,7 +19,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -27,11 +27,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # 
via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -39,11 +39,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -55,7 +55,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -71,15 +71,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -91,11 +91,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -103,7 +103,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -130,11 +130,11 @@ colorama==0.4.6 # sphinx # tox # tqdm -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -144,13 +144,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -158,9 +158,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -177,15 +177,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via 
stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -193,14 +194,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -215,11 +215,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -237,7 +233,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -245,14 +241,14 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec[http]==2023.6.0 +fsspec[http]==2023.9.2 # via # dask # pytorch-lightning # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -263,7 +259,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -273,6 +269,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -283,7 +280,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -291,15 +288,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -307,7 +303,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter 
isoduration==20.11.0 # via jsonschema @@ -334,13 +330,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -352,7 +348,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -361,7 +357,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -376,7 +372,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -385,21 +381,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -411,7 +407,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -432,7 +428,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -448,7 +444,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -456,7 +452,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # 
dioptra (pyproject.toml) @@ -464,7 +460,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -474,17 +470,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -508,9 +504,9 @@ numpy==1.25.2 # torchvision oauthlib==3.2.2 # via databricks-cli -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -534,9 +530,8 @@ packaging==23.1 # qtpy # scikit-image # sphinx - # torchmetrics # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -545,7 +540,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -555,7 +550,7 @@ pendulum==2.1.2 # via prefect pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio @@ -563,14 +558,14 @@ pillow==10.0.0 # matplotlib # scikit-image # torchvision -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -584,7 +579,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -592,7 +587,7 @@ psutil==5.9.5 # ipykernel pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -607,7 +602,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -617,15 
+612,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -651,7 +646,7 @@ pytoml==0.1.21 # via dioptra (pyproject.toml) pytorch-lightning==1.9.5 ; python_version == "3.9" # via -r requirements-dev-pytorch.in -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -681,25 +676,25 @@ pyyaml==6.0.1 # mlflow # prefect # pytorch-lightning -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -725,9 +720,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -735,7 +730,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -743,7 +738,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -767,7 +762,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -777,7 +772,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -824,7 +819,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via 
scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -856,11 +851,11 @@ torch==1.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and pyt # torchvision torchaudio==0.10.2+cpu ; (sys_platform == "win32" or sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -torchmetrics==1.0.1 +torchmetrics==1.2.0 # via pytorch-lightning torchvision==0.11.3+cpu ; (sys_platform == "win32" or sys_platform == "linux") and python_version == "3.9" and (platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64") # via -r requirements-dev-pytorch.in -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -869,14 +864,14 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle # pytorch-lightning -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -895,7 +890,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -909,7 +904,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -922,7 +917,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox waitress==2.1.2 # via mlflow @@ -934,7 +929,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -945,11 +940,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via 
ipywidgets wtforms[email]==3.0.1 # via @@ -960,7 +955,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/win-x86_64-py3.9-requirements-dev-tensorflow.txt b/requirements/win-amd64-py3.9-requirements-dev-tensorflow.txt similarity index 90% rename from requirements/win-x86_64-py3.9-requirements-dev-tensorflow.txt rename to requirements/win-amd64-py3.9-requirements-dev-tensorflow.txt index 5ac897296..c9762afef 100644 --- a/requirements/win-x86_64-py3.9-requirements-dev-tensorflow.txt +++ b/requirements/win-amd64-py3.9-requirements-dev-tensorflow.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-x86_64-py3.9-requirements-dev-tensorflow.txt' pyproject.toml requirements-dev-tensorflow.in requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-amd64-py3.9-requirements-dev-tensorflow.txt' pyproject.toml requirements-dev-tensorflow.in requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -absl-py==1.4.0 +absl-py==2.0.0 # via # tensorboard # tensorflow-intel -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -26,11 +26,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -38,13 +38,13 @@ arrow==1.2.3 # via # isoduration 
# jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data astunparse==1.6.3 # via tensorflow-intel async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -56,7 +56,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -72,15 +72,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) -botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -94,11 +94,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -106,7 +106,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -133,11 +133,11 @@ colorama==0.4.6 # sphinx # tox # tqdm -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -147,13 +147,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -161,9 +161,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -180,15 +180,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 
+filelock==3.12.4 # via # tox # virtualenv @@ -196,14 +197,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -218,11 +218,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) -flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -242,7 +238,7 @@ flask-wtf==1.1.1 # dioptra (pyproject.toml) flatbuffers==23.5.26 # via tensorflow-intel -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -250,7 +246,7 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib @@ -258,9 +254,9 @@ gast==0.4.0 # via tensorflow-intel gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow -google-auth==2.22.0 +google-auth==2.23.2 # via # google-auth-oauthlib # tensorboard @@ -270,7 +266,7 @@ google-pasta==0.2.0 # via tensorflow-intel greenlet==2.0.2 # via sqlalchemy -grpcio==1.56.2 +grpcio==1.58.0 # via # tensorboard # tensorflow-intel @@ -283,7 +279,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -293,6 +289,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jax @@ -305,7 +302,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -313,15 +310,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra 
(pyproject.toml) # ipykernel @@ -329,7 +325,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -337,7 +333,7 @@ itsdangerous==2.1.2 # via # flask # flask-wtf -jax==0.4.14 +jax==0.4.16 # via tensorflow-intel jedi==0.19.0 # via ipython @@ -358,13 +354,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -376,7 +372,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -385,7 +381,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -400,7 +396,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -409,23 +405,23 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) keras==2.12.0 # via tensorflow-intel -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -435,7 +431,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -458,7 +454,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # 
via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -470,13 +466,13 @@ mdurl==0.1.2 # via markdown-it-py mistune==3.0.1 # via nbconvert -ml-dtypes==0.2.0 +ml-dtypes==0.3.1 # via jax mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -484,7 +480,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -492,7 +488,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -502,11 +498,11 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via @@ -541,13 +537,13 @@ oauthlib==3.2.2 # via # databricks-cli # requests-oauthlib -opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug opt-einsum==3.3.0 # via # jax # tensorflow-intel -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -571,7 +567,7 @@ packaging==23.1 # sphinx # tensorflow-intel # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -580,7 +576,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -590,21 +586,21 @@ pendulum==2.1.2 # via prefect pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -618,7 +614,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via # mlflow # 
tensorboard @@ -629,7 +625,7 @@ psutil==5.9.5 # ipykernel pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pyasn1==0.5.0 # via @@ -650,7 +646,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -660,15 +656,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -692,7 +688,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -721,25 +717,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 +qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -768,9 +764,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -780,7 +776,7 @@ rq==1.15.1 # dioptra (pyproject.toml) rsa==4.9 # via google-auth -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -788,7 +784,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -809,7 +805,6 @@ six==1.16.0 # astunparse # bleach # databricks-cli - # google-auth # google-pasta # imgaug # kaggle @@ -817,7 
+812,7 @@ six==1.16.0 # querystring-parser # rfc3339-validator # tensorflow-intel -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -827,7 +822,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -888,7 +883,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -911,7 +906,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -920,13 +915,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -957,7 +952,7 @@ typing-extensions==4.5.0 # typeguard tzdata==2023.3 # via pandas -universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -967,11 +962,10 @@ urllib3==1.26.16 # databricks-cli # distributed # docker - # google-auth # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox waitress==2.1.2 # via mlflow @@ -983,7 +977,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -995,13 +989,13 @@ werkzeug==2.1.2 # flask-accepts # flask-restx # tensorboard -wheel==0.41.0 +wheel==0.41.2 # via # astunparse # dioptra (pyproject.toml) # pip-tools # tensorboard -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wrapt==1.14.1 # via tensorflow-intel @@ -1014,7 +1008,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/requirements/win-x86_64-py3.9-requirements-dev.txt 
b/requirements/win-amd64-py3.9-requirements-dev.txt similarity index 89% rename from requirements/win-x86_64-py3.9-requirements-dev.txt rename to requirements/win-amd64-py3.9-requirements-dev.txt index b7b216dcd..60cf4be50 100644 --- a/requirements/win-x86_64-py3.9-requirements-dev.txt +++ b/requirements/win-amd64-py3.9-requirements-dev.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-x86_64-py3.9-requirements-dev.txt' pyproject.toml requirements-dev.in +# pip-compile --extra=cookiecutter --extra=dev --extra=examples --extra=sdk --extra=taskplugins --output-file='venvs\win-amd64-py3.9-requirements-dev.txt' pyproject.toml requirements-dev.in # -e file:.#egg=dioptra # via -r requirements-dev.in -adversarial-robustness-toolbox==1.15.0 +adversarial-robustness-toolbox==1.16.0 # via dioptra (pyproject.toml) aiohttp==3.8.5 # via dioptra (pyproject.toml) @@ -14,7 +14,7 @@ aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 # via sphinx -alembic==1.11.1 +alembic==1.12.0 # via # dioptra # dioptra (pyproject.toml) @@ -22,11 +22,11 @@ alembic==1.11.1 # mlflow aniso8601==9.0.1 # via flask-restx -anyio==3.7.1 +anyio==4.0.0 # via jupyter-server appdirs==1.4.4 # via esbonio -argon2-cffi==21.3.0 +argon2-cffi==23.1.0 # via jupyter-server argon2-cffi-bindings==21.2.0 # via argon2-cffi @@ -34,11 +34,11 @@ arrow==1.2.3 # via # isoduration # jinja2-time -asttokens==2.2.1 +asttokens==2.4.0 # via stack-data async-lru==2.0.4 # via jupyterlab -async-timeout==4.0.2 +async-timeout==4.0.3 # via # aiohttp # redis @@ -50,7 +50,7 @@ attrs==23.1.0 # jsonschema # lsprotocol # referencing -autopep8==2.0.2 +autopep8==2.0.4 # via dioptra (pyproject.toml) babel==2.12.1 # via @@ -66,15 +66,15 @@ bleach==6.0.0 # via # kaggle # nbconvert -boto3==1.28.16 +boto3==1.28.57 # via # dioptra # dioptra (pyproject.toml) 
-botocore==1.31.16 +botocore==1.31.57 # via # boto3 # s3transfer -build==0.10.0 +build==1.0.3 # via # dioptra (pyproject.toml) # pip-tools @@ -86,11 +86,11 @@ certifi==2023.7.22 # via # kaggle # requests -cffi==1.15.1 +cffi==1.16.0 # via # argon2-cffi-bindings # cryptography -chardet==5.1.0 +chardet==5.2.0 # via # binaryornot # tox @@ -98,7 +98,7 @@ charset-normalizer==3.2.0 # via # aiohttp # requests -click==8.1.6 +click==8.1.7 # via # cookiecutter # dask @@ -125,11 +125,11 @@ colorama==0.4.6 # sphinx # tox # tqdm -comm==0.1.3 +comm==0.1.4 # via # ipykernel # ipywidgets -contourpy==1.1.0 +contourpy==1.1.1 # via matplotlib cookiecutter==2.1.1 # via dioptra (pyproject.toml) @@ -139,13 +139,13 @@ cryptography==3.4.8 # via dioptra (pyproject.toml) cycler==0.11.0 # via matplotlib -dask==2023.7.1 +dask==2023.9.2 # via # distributed # prefect -databricks-cli==0.17.7 +databricks-cli==0.17.8 # via mlflow -debugpy==1.6.7 +debugpy==1.8.0 # via ipykernel decorator==5.1.1 # via ipython @@ -153,9 +153,9 @@ defusedxml==0.7.1 # via nbconvert distlib==0.3.7 # via virtualenv -distributed==2023.7.1 +distributed==2023.9.2 # via prefect -dnspython==2.4.1 +dnspython==2.4.2 # via email-validator docker==6.1.3 # via @@ -172,15 +172,16 @@ entrypoints==0.4 # mlflow esbonio==0.16.1 # via dioptra (pyproject.toml) -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via # anyio # cattrs + # ipython executing==1.2.0 # via stack-data fastjsonschema==2.18.0 # via nbformat -filelock==3.12.2 +filelock==3.12.4 # via # tox # virtualenv @@ -188,14 +189,13 @@ flake8==6.1.0 # via # dioptra (pyproject.toml) # flake8-bugbear -flake8-bugbear==23.7.10 +flake8-bugbear==23.9.16 # via dioptra (pyproject.toml) flask==2.1.3 # via # dioptra # dioptra (pyproject.toml) # flask-cors - # flask-injector # flask-migrate # flask-restx # flask-sqlalchemy @@ -210,11 +210,7 @@ flask-cors==4.0.0 # via # dioptra # dioptra (pyproject.toml) -flask-injector==0.14.0 - # via - # dioptra - # dioptra (pyproject.toml) 
-flask-migrate==4.0.4 +flask-migrate==4.0.5 # via # dioptra # dioptra (pyproject.toml) @@ -232,7 +228,7 @@ flask-wtf==1.1.1 # via # dioptra # dioptra (pyproject.toml) -fonttools==4.41.1 +fonttools==4.42.1 # via matplotlib fqdn==1.5.1 # via jsonschema @@ -240,13 +236,13 @@ frozenlist==1.4.0 # via # aiohttp # aiosignal -fsspec==2023.6.0 +fsspec==2023.9.2 # via # dask # universal-pathlib gitdb==4.0.10 # via gitpython -gitpython==3.1.32 +gitpython==3.1.37 # via mlflow greenlet==2.0.2 # via sqlalchemy @@ -257,7 +253,7 @@ idna==3.4 # jsonschema # requests # yarl -imageio==2.31.1 +imageio==2.31.4 # via # imgaug # scikit-image @@ -267,6 +263,7 @@ imgaug==0.4.0 # via dioptra (pyproject.toml) importlib-metadata==6.8.0 # via + # build # dask # flask # jupyter-client @@ -277,7 +274,7 @@ importlib-metadata==6.8.0 # nbconvert # sphinx # typeguard -importlib-resources==6.0.0 +importlib-resources==6.1.0 # via # matplotlib # prefect @@ -285,15 +282,14 @@ injector==0.21.0 # via # dioptra # dioptra (pyproject.toml) - # flask-injector -ipykernel==6.25.0 +ipykernel==6.25.2 # via # dioptra (pyproject.toml) # jupyter # jupyter-console # jupyterlab # qtconsole -ipython==8.14.0 +ipython==8.15.0 # via # dioptra (pyproject.toml) # ipykernel @@ -301,7 +297,7 @@ ipython==8.14.0 # jupyter-console ipython-genutils==0.2.0 # via qtconsole -ipywidgets==8.1.0 +ipywidgets==8.1.1 # via jupyter isoduration==20.11.0 # via jsonschema @@ -328,13 +324,13 @@ jmespath==1.0.1 # via # boto3 # botocore -joblib==1.3.1 +joblib==1.3.2 # via scikit-learn json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.18.4 +jsonschema[format-nongpl]==4.19.1 # via # dioptra # dioptra (pyproject.toml) @@ -346,7 +342,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema jupyter==1.0.0 # via dioptra (pyproject.toml) -jupyter-client==8.3.0 +jupyter-client==8.3.1 # via # ipykernel # jupyter-console @@ -355,7 +351,7 @@ jupyter-client==8.3.0 # qtconsole jupyter-console==6.6.3 # via 
jupyter -jupyter-core==5.3.1 +jupyter-core==5.3.2 # via # ipykernel # jupyter-client @@ -370,7 +366,7 @@ jupyter-events==0.7.0 # via jupyter-server jupyter-lsp==2.2.0 # via jupyterlab -jupyter-server==2.7.0 +jupyter-server==2.7.3 # via # jupyter-lsp # jupyterlab @@ -379,21 +375,21 @@ jupyter-server==2.7.0 # notebook-shim jupyter-server-terminals==0.4.4 # via jupyter-server -jupyterlab==4.0.3 +jupyterlab==4.0.6 # via # dioptra (pyproject.toml) # notebook jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-server==2.24.0 +jupyterlab-server==2.25.0 # via # jupyterlab # notebook -jupyterlab-widgets==3.0.8 +jupyterlab-widgets==3.0.9 # via ipywidgets kaggle==1.5.16 # via dioptra (pyproject.toml) -kiwisolver==1.4.4 +kiwisolver==1.4.5 # via matplotlib lazy-loader==0.3 # via scikit-image @@ -401,7 +397,7 @@ locket==1.0.0 # via # distributed # partd -lsprotocol==2023.0.0a2 +lsprotocol==2023.0.0b1 # via pygls mako==1.2.4 # via alembic @@ -422,7 +418,7 @@ marshmallow==3.20.1 # prefect marshmallow-oneofschema==3.0.1 # via prefect -matplotlib==3.7.2 +matplotlib==3.8.0 # via imgaug matplotlib-inline==0.1.6 # via @@ -438,7 +434,7 @@ mlflow==1.27.0 # via # dioptra # dioptra (pyproject.toml) -msgpack==1.0.5 +msgpack==1.0.7 # via # distributed # prefect @@ -446,7 +442,7 @@ multidict==6.0.4 # via # aiohttp # yarl -multimethod==1.9.1 +multimethod==1.10 # via # dioptra # dioptra (pyproject.toml) @@ -454,7 +450,7 @@ mypy-extensions==1.0.0 # via prefect nbclient==0.8.0 # via nbconvert -nbconvert==7.7.3 +nbconvert==7.8.0 # via # dioptra (pyproject.toml) # jupyter @@ -464,17 +460,17 @@ nbformat==5.9.2 # jupyter-server # nbclient # nbconvert -nest-asyncio==1.5.7 +nest-asyncio==1.5.8 # via ipykernel networkx==3.1 # via scikit-image -notebook==7.0.1 +notebook==7.0.4 # via jupyter notebook-shim==0.2.3 # via # jupyterlab # notebook -numpy==1.25.2 +numpy==1.26.0 # via # adversarial-robustness-toolbox # contourpy @@ -495,9 +491,9 @@ numpy==1.25.2 # tifffile oauthlib==3.2.2 # via databricks-cli 
-opencv-python==4.8.0.74 +opencv-python==4.8.1.78 # via imgaug -overrides==7.3.1 +overrides==7.4.0 # via jupyter-server packaging==23.1 # via @@ -520,7 +516,7 @@ packaging==23.1 # scikit-image # sphinx # tox -pandas==2.0.3 +pandas==2.1.1 # via # dioptra # dioptra (pyproject.toml) @@ -529,7 +525,7 @@ pandocfilters==1.5.0 # via nbconvert parso==0.8.3 # via jedi -partd==1.4.0 +partd==1.4.1 # via dask passlib==1.7.4 # via @@ -539,21 +535,21 @@ pendulum==2.1.2 # via prefect pickleshare==0.7.5 # via ipython -pillow==10.0.0 +pillow==10.0.1 # via # dioptra (pyproject.toml) # imageio # imgaug # matplotlib # scikit-image -pip-tools==7.1.0 +pip-tools==7.3.0 # via dioptra (pyproject.toml) platformdirs==3.10.0 # via # jupyter-core # tox # virtualenv -pluggy==1.2.0 +pluggy==1.3.0 # via tox prefect==1.4.1 # via dioptra (pyproject.toml) @@ -567,7 +563,7 @@ prompt-toolkit==3.0.39 # via # ipython # jupyter-console -protobuf==4.23.4 +protobuf==4.24.3 # via mlflow psutil==5.9.5 # via @@ -575,7 +571,7 @@ psutil==5.9.5 # ipykernel pure-eval==0.2.2 # via stack-data -pyarrow==12.0.1 +pyarrow==13.0.0 # via dioptra (pyproject.toml) pycodestyle==2.11.0 # via @@ -590,7 +586,7 @@ pyflakes==3.1.0 # via flake8 pygls==1.0.2 # via esbonio -pygments==2.15.1 +pygments==2.16.1 # via # ipython # jupyter-console @@ -600,15 +596,15 @@ pygments==2.15.1 # sphinx pyjwt==2.8.0 # via databricks-cli -pyparsing==3.0.9 +pyparsing==3.1.1 # via matplotlib -pyproject-api==1.5.3 +pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via build pyspellchecker==0.7.2 # via esbonio -python-box==7.0.1 +python-box==7.1.1 # via prefect python-dateutil==2.8.2 # via @@ -632,7 +628,7 @@ python-slugify==8.0.1 # prefect pytoml==0.1.21 # via dioptra (pyproject.toml) -pytz==2023.3 +pytz==2023.3.post1 # via # flask-restx # mlflow @@ -661,25 +657,25 @@ pyyaml==6.0.1 # jupyter-events # mlflow # prefect -pyzmq==25.1.0 +pyzmq==25.1.1 # via # ipykernel # jupyter-client # jupyter-console # jupyter-server # qtconsole -qtconsole==5.4.3 
+qtconsole==5.4.4 # via jupyter -qtpy==2.3.1 +qtpy==2.4.0 # via qtconsole querystring-parser==1.2.4 # via mlflow -redis==4.6.0 +redis==5.0.1 # via # dioptra # dioptra (pyproject.toml) # rq -referencing==0.30.0 +referencing==0.30.2 # via # jsonschema # jsonschema-specifications @@ -704,9 +700,9 @@ rfc3986-validator==0.1.1 # via # jsonschema # jupyter-events -rich==13.5.2 +rich==13.5.3 # via dioptra (pyproject.toml) -rpds-py==0.9.2 +rpds-py==0.10.3 # via # jsonschema # referencing @@ -714,7 +710,7 @@ rq==1.15.1 # via # dioptra # dioptra (pyproject.toml) -s3transfer==0.6.1 +s3transfer==0.7.0 # via boto3 scikit-image==0.21.0 # via imgaug @@ -722,7 +718,7 @@ scikit-learn==1.0.2 # via # adversarial-robustness-toolbox # dioptra (pyproject.toml) -scipy==1.11.1 +scipy==1.11.3 # via # adversarial-robustness-toolbox # dioptra @@ -746,7 +742,7 @@ six==1.16.0 # python-dateutil # querystring-parser # rfc3339-validator -smmap==5.0.0 +smmap==5.0.1 # via gitdb sniffio==1.3.0 # via anyio @@ -756,7 +752,7 @@ snowballstemmer==2.2.0 # sphinx sortedcontainers==2.4.0 # via distributed -soupsieve==2.4.1 +soupsieve==2.5 # via beautifulsoup4 sphinx==4.5.0 # via @@ -803,7 +799,7 @@ text-unidecode==1.3 # via python-slugify threadpoolctl==3.2.0 # via scikit-learn -tifffile==2023.7.18 +tifffile==2023.9.26 # via scikit-image tinycss2==1.2.1 # via nbconvert @@ -826,7 +822,7 @@ toolz==0.12.0 # dask # distributed # partd -tornado==6.3.2 +tornado==6.3.3 # via # distributed # ipykernel @@ -835,13 +831,13 @@ tornado==6.3.2 # jupyterlab # notebook # terminado -tox==4.6.4 +tox==4.11.3 # via dioptra (pyproject.toml) -tqdm==4.65.0 +tqdm==4.66.1 # via # adversarial-robustness-toolbox # kaggle -traitlets==5.9.0 +traitlets==5.10.1 # via # comm # ipykernel @@ -860,7 +856,7 @@ traitlets==5.9.0 # qtconsole typeguard==3.0.2 # via pygls -typing-extensions==4.7.1 +typing-extensions==4.8.0 # via # alembic # async-lru @@ -871,7 +867,7 @@ typing-extensions==4.7.1 # typeguard tzdata==2023.3 # via pandas 
-universal-pathlib==0.0.24 +universal-pathlib==0.1.3 # via dioptra (pyproject.toml) uri-template==1.3.0 # via jsonschema @@ -884,7 +880,7 @@ urllib3==1.26.16 # kaggle # prefect # requests -virtualenv==20.24.2 +virtualenv==20.24.5 # via tox waitress==2.1.2 # via mlflow @@ -896,7 +892,7 @@ webencodings==0.5.1 # via # bleach # tinycss2 -websocket-client==1.6.1 +websocket-client==1.6.3 # via # docker # jupyter-server @@ -907,11 +903,11 @@ werkzeug==2.1.2 # flask # flask-accepts # flask-restx -wheel==0.41.0 +wheel==0.41.2 # via # dioptra (pyproject.toml) # pip-tools -widgetsnbextension==4.0.8 +widgetsnbextension==4.0.9 # via ipywidgets wtforms[email]==3.0.1 # via @@ -922,7 +918,7 @@ yarl==1.9.2 # via aiohttp zict==3.0.0 # via distributed -zipp==3.16.2 +zipp==3.17.0 # via # importlib-metadata # importlib-resources diff --git a/src/dioptra/mlflow_plugins/dioptra_clients.py b/src/dioptra/mlflow_plugins/dioptra_clients.py index b056b4e57..710345813 100644 --- a/src/dioptra/mlflow_plugins/dioptra_clients.py +++ b/src/dioptra/mlflow_plugins/dioptra_clients.py @@ -36,7 +36,7 @@ class DioptraDatabaseClient(object): @property def app(self) -> Flask: - app: Flask = create_app(env=self.restapi_env) + app = create_app(env=self.restapi_env) return app @property diff --git a/src/dioptra/restapi/app.py b/src/dioptra/restapi/app.py index 586740941..e7b34ef89 100644 --- a/src/dioptra/restapi/app.py +++ b/src/dioptra/restapi/app.py @@ -29,14 +29,16 @@ import structlog from flask import Flask, jsonify from flask_cors import CORS -from flask_injector import FlaskInjector from flask_migrate import Migrate from flask_restx import Api from flask_sqlalchemy import SQLAlchemy from flask_wtf import CSRFProtect +from injector import Injector from sqlalchemy import MetaData from structlog.stdlib import BoundLogger +from dioptra.restapi.utils import setup_injection + from .__version__ import __version__ as API_VERSION LOGGER: BoundLogger = structlog.stdlib.get_logger() @@ -57,18 +59,15 @@ migrate: 
Migrate = Migrate() -def create_app(env: Optional[str] = None, inject_dependencies: bool = True): +def create_app(env: Optional[str] = None, injector: Optional[Injector] = None) -> Flask: """Creates and configures a fresh instance of the Dioptra REST API. Args: env: The configuration environment to use for the application. The allowed values are `"dev"`, `"prod"` and `"test"`. If `None`, the `"test"` configuration is used. The default is `None`. - inject_dependencies: Controls whether or not the dependency injection settings - in the ``dependencies.py`` files will be used. If `False`, then dependency - injection is not used and the configuration of the shared services must be - handled after the :py:class:`~flask.Flask` object is created. This is mostly - useful when performing unit tests. The default is `True`. + injector: A dependency injector used to invoke restx views. If None, + a default will be created. Returns: An initialized and configured :py:class:`~flask.Flask` object. @@ -91,11 +90,9 @@ def create_app(env: Optional[str] = None, inject_dependencies: bool = True): doc=app.config["DIOPTRA_SWAGGER_PATH"], url_scheme=app.config["DIOPTRA_BASE_URL"], ) - modules: List[Callable[..., Any]] = [bind_dependencies] register_routes(api, app) register_error_handlers(api) - register_providers(modules) csrf.init_app(app) db.init_app(app) @@ -113,9 +110,11 @@ def health(): log = LOGGER.new(request_id=str(uuid.uuid4())) # noqa: F841 return jsonify("healthy") - if not inject_dependencies: - return app + if not injector: + modules: List[Callable[..., Any]] = [bind_dependencies] + register_providers(modules) + injector = Injector(modules) - FlaskInjector(app=app, modules=modules) + setup_injection(api, injector) return app diff --git a/src/dioptra/restapi/auth/errors.py b/src/dioptra/restapi/auth/errors.py index 04512bad8..9ef3410f9 100644 --- a/src/dioptra/restapi/auth/errors.py +++ b/src/dioptra/restapi/auth/errors.py @@ -44,3 +44,34 @@ def handle_logout_error(error): }, 
500 ) + +# This Software (Dioptra) is being made available as a public service by the +# National Institute of Standards and Technology (NIST), an Agency of the United +# States Department of Commerce. This software was developed in part by employees of +# NIST and in part by NIST contractors. Copyright in portions of this software that +# were developed by NIST contractors has been licensed or assigned to NIST. Pursuant +# to Title 17 United States Code Section 105, works of NIST employees are not +# subject to copyright protection in the United States. However, NIST may hold +# international copyright in software created by its employees and domestic +# copyright (or licensing rights) in portions of software that were assigned or +# licensed to NIST. To the extent that NIST holds copyright in this software, it is +# being made available under the Creative Commons Attribution 4.0 International +# license (CC BY 4.0). The disclaimers of the CC BY 4.0 license apply to all parts +# of the software developed or licensed by NIST. 
+# +# ACCESS THE FULL CC BY 4.0 LICENSE HERE: +# https://creativecommons.org/licenses/by/4.0/legalcode +"""Error handlers for the auth endpoints.""" +from __future__ import annotations + +from flask_restx import Api + + +class LogoutError(Exception): + """The current user was not logged out.""" + + +def register_error_handlers(api: Api) -> None: + @api.errorhandler(LogoutError) + def handle_logout_error(error): + return {"message": "Internal Service Error - The current user was not logged out."}, 500 diff --git a/src/dioptra/restapi/experiment/dependencies.py b/src/dioptra/restapi/experiment/dependencies.py index 1fffa555b..af134fce6 100644 --- a/src/dioptra/restapi/experiment/dependencies.py +++ b/src/dioptra/restapi/experiment/dependencies.py @@ -19,10 +19,11 @@ from typing import Any, Callable, List -from flask_injector import request from injector import Binder, Module, provider from mlflow.tracking import MlflowClient +from dioptra.restapi.shared.request_scope import request + from .schema import ExperimentRegistrationFormSchema diff --git a/src/dioptra/restapi/job/dependencies.py b/src/dioptra/restapi/job/dependencies.py index c744884fb..6bcb3009e 100644 --- a/src/dioptra/restapi/job/dependencies.py +++ b/src/dioptra/restapi/job/dependencies.py @@ -23,10 +23,10 @@ from boto3.session import Session from botocore.client import BaseClient -from flask_injector import request from injector import Binder, Module, provider from redis import Redis +from dioptra.restapi.shared.request_scope import request from dioptra.restapi.shared.rq.service import RQService from .schema import JobFormSchema diff --git a/src/dioptra/restapi/shared/request_scope.py b/src/dioptra/restapi/shared/request_scope.py new file mode 100644 index 000000000..2db2f1ee8 --- /dev/null +++ b/src/dioptra/restapi/shared/request_scope.py @@ -0,0 +1,136 @@ +# Code below is copied from Flask-Injector library. 
It has the following +# license: +# +# Copyright (c) 2012, Alec Thomas +# Copyright (c) 2015 Smarkets Limited +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# - Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# - Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# - Neither the name of SwapOff.org nor the names of its contributors may +# be used to endorse or promote products derived from this software without +# specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Tell flake8 to ignore this file. It is mostly just copy-pastes from +# Flask-Injector. 
+# flake8: noqa + +from typing import Any, Dict + +import flask +from injector import Injector, Provider, Scope, ScopeDecorator +from werkzeug.local import Local, LocalManager + + +class CachedProviderWrapper(Provider): + def __init__(self, old_provider: Provider) -> None: + self._old_provider = old_provider + self._cache = {} # type: Dict[int, Any] + + def get(self, injector: Injector) -> Any: + key = id(injector) + try: + return self._cache[key] + except KeyError: + instance = self._cache[key] = self._old_provider.get(injector) + return instance + + +class RequestScope(Scope): + """A scope whose object lifetime is tied to a request. + + @request + class Session: + pass + """ + + # We don't want to assign here, just provide type hints + if False: + _local_manager = None # type: LocalManager + _locals = None # type: Any + + def cleanup(self) -> None: + self._local_manager.cleanup() + + def prepare(self) -> None: + self._locals.scope = {} + + def configure(self) -> None: + self._locals = Local() + self._local_manager = LocalManager([self._locals]) + self.prepare() + + def get(self, key: Any, provider: Provider) -> Any: + try: + return self._locals.scope[key] + except KeyError: + new_provider = self._locals.scope[key] = CachedProviderWrapper(provider) + return new_provider + + +request = ScopeDecorator(RequestScope) + + +def set_request_scope_callbacks(app: flask.Flask, injector: Injector) -> None: + """ + Set callbacks to enable request scoping behavior: initialize at the + beginning of request handling, and cleanup at the end. 
+ + Args: + app: A Flask app + injector: An injector, used to get the RequestScope object + """ + + def reset_request_scope_before(*args: Any, **kwargs: Any) -> None: + injector.get(RequestScope).prepare() + + def global_reset_request_scope_after(*args: Any, **kwargs: Any) -> None: + blueprint = flask.request.blueprint + # If current blueprint has teardown_request_funcs associated with it we know there may be + # a some teardown request handlers we need to inject into, so we can't reset the scope just yet. + # We'll leave it to blueprint_reset_request_scope_after to do the job which we know will run + # later and we know it'll run after any teardown_request handlers we may want to inject into. + if blueprint is None or blueprint not in app.teardown_request_funcs: + injector.get(RequestScope).cleanup() + + def blueprint_reset_request_scope_after(*args: Any, **kwargs: Any) -> None: + # If we got here we truly know this is the last teardown handler, which means we can reset the + # scope unconditionally. + injector.get(RequestScope).cleanup() + + app.before_request_funcs.setdefault(None, []).insert(0, reset_request_scope_before) + # We're accessing Flask internals here as the app.teardown_request decorator appends to a list of + # handlers but Flask itself reverses the list when it executes them. To allow injecting request-scoped + # dependencies into teardown_request handlers we need to run our teardown_request handler after them. + # Also see https://github.com/alecthomas/flask_injector/issues/42 where it was reported. + # Secondly, we need to handle blueprints. Flask first executes non-blueprint teardown handlers in + # reverse order and only then executes blueprint-associated teardown handlers in reverse order, + # which means we can't just set on non-blueprint teardown handler, but we need to set both. + # In non-blueprint teardown handler we check if a blueprint handler will run – if so, we do nothing + # there and leave it to the blueprint teardown handler. 
+ # + # We need the None key to be present in the dictionary so that the dictionary iteration always yields + # None as well. We *always* have to set the global teardown request. + app.teardown_request_funcs.setdefault(None, []).insert( + 0, global_reset_request_scope_after + ) + for bp, functions in app.teardown_request_funcs.items(): + if bp is not None: + functions.insert(0, blueprint_reset_request_scope_after) diff --git a/src/dioptra/restapi/task_plugin/service.py b/src/dioptra/restapi/task_plugin/service.py index e49455925..1bd0969ea 100644 --- a/src/dioptra/restapi/task_plugin/service.py +++ b/src/dioptra/restapi/task_plugin/service.py @@ -78,7 +78,7 @@ def create( plugin_uri_list: Optional[List[str]] = self._s3_service.upload_directory( directory=tmpdir, bucket=bucket, - prefix=str(prefix), + prefix=prefix.as_posix(), include_suffixes=[".py"], log=log, ) @@ -114,7 +114,11 @@ def delete( return [] prefix: Path = Path(collection) / task_plugin_name - self._s3_service.delete_prefix(bucket=bucket, prefix=str(prefix), log=log) + self._s3_service.delete_prefix( + bucket=bucket, + prefix=prefix.as_posix(), + log=log, + ) log.info( "TaskPlugin deleted", @@ -175,7 +179,7 @@ def get_by_name_in_collection( task_plugin_name=task_plugin_name, ) - prefix = Path(collection) / task_plugin_name + prefix: Path = Path(collection) / task_plugin_name modules: List[str] = self._s3_service.list_objects( bucket=bucket, prefix=self._s3_service.normalize_prefix(str(prefix), log=log), diff --git a/src/dioptra/restapi/utils.py b/src/dioptra/restapi/utils.py index ae782ba33..52026cb8c 100644 --- a/src/dioptra/restapi/utils.py +++ b/src/dioptra/restapi/utils.py @@ -23,12 +23,17 @@ """ from __future__ import annotations -from typing import List +import functools +from typing import Any, Callable, List, Protocol, Type -from flask_restx import Namespace +from flask.views import View +from flask_restx import Api, Namespace, Resource from flask_restx.reqparse import RequestParser +from 
injector import Injector from typing_extensions import TypedDict +from dioptra.restapi.shared.request_scope import set_request_scope_callbacks + class ParametersSchema(TypedDict, total=False): """A schema of the parameters that can be passed to the |RequestParser|.""" @@ -79,3 +84,98 @@ def slugify(text: str) -> str: """ return text.lower().strip().replace(" ", "-") + + +class _ClassBasedViewFunction(Protocol): + """ + We distinguish a class-based view function from other view functions + by looking for a "view_class" attribute on the function. + """ + + view_class: Type[View] + + def __call__(self, *args, **kwargs) -> Any: + ... + + +def _new_class_view_function( + func: _ClassBasedViewFunction, injector: Injector, api: Api +) -> Callable[..., Any]: + """ + Create a view function which supports injection, based on the given + class-based view function. "Wrapping" func won't work here, in the sense + that our view function can't delegate to func since the latter does not + support dependency-injected view object creation. So we create a brand new + one (@wrap'd, so it has the look of func at least), which does + dependency-injected view creation. + + Args: + func: The old class-based view function + injector: An injector + api: The flask_restx Api instance + + Returns: + A new view function + """ + + is_restx_resource = issubclass(func.view_class, Resource) + + additional_kwargs = {} + if is_restx_resource: + additional_kwargs["api"] = api + + # Honoring init_every_request is simple enough to do, so why not. + # It was added in Flask 2.2.0; it behaved as though True, previously. 
+ init_every_request = getattr(func.view_class, "init_every_request", True) + + if not init_every_request: + view_obj = injector.create_object( + func.view_class, additional_kwargs=additional_kwargs + ) + + @functools.wraps( + func, + assigned=functools.WRAPPER_ASSIGNMENTS + + ("view_class", "methods", "provide_automatic_options"), + ) + def new_view_func(*args, **kwargs): + nonlocal view_obj + if init_every_request: + view_obj = injector.create_object( + func.view_class, additional_kwargs=additional_kwargs + ) + + return view_obj.dispatch_request(*args, **kwargs) + + if is_restx_resource: + new_view_func = api.output(new_view_func) + + return new_view_func + + +def setup_injection(api: Api, injector: Injector) -> None: + """ + Fixup the given flask app such that class-based view functions support + dependency injection. + + Args: + api: A flask_restx Api object. This contains the flask app, and is + also necessary to make restx views (resources) work with + dependency injection. + injector: An injector + """ + + new_view_func: Callable[..., Any] + + for key, func in api.app.view_functions.items(): + if hasattr(func, "view_class"): + new_view_func = _new_class_view_function(func, injector, api) + api.app.view_functions[key] = new_view_func + + set_request_scope_callbacks(api.app, injector) + + # Uncomment to see more detailed logging regarding dependency injection + # in debug mode. 
+ # if api.app.debug: + # injector_logger = logging.getLogger("injector") + # injector_logger.setLevel(logging.DEBUG) diff --git a/tests/cookiecutter_dioptra_deployment/test_create_template.py b/tests/cookiecutter_dioptra_deployment/test_create_template.py index 09d6f03f7..bf345660d 100644 --- a/tests/cookiecutter_dioptra_deployment/test_create_template.py +++ b/tests/cookiecutter_dioptra_deployment/test_create_template.py @@ -32,7 +32,7 @@ def check_paths(paths): if is_binary(str(path)): continue - for line in path.open("r"): + for line in path.open("r", encoding="utf-8"): match = RE_OBJ.search(line) assert match is None, f"cookiecutter variable not replaced in {path}" diff --git a/tests/unit/restapi/conftest.py b/tests/unit/restapi/conftest.py index 521c67d97..c7ddd45c4 100644 --- a/tests/unit/restapi/conftest.py +++ b/tests/unit/restapi/conftest.py @@ -25,11 +25,12 @@ from boto3.session import Session from botocore.client import BaseClient from flask import Flask -from flask_injector import FlaskInjector, request from flask_restx import Api from flask_sqlalchemy import SQLAlchemy from injector import Binder, Injector from redis import Redis +from dioptra.restapi.utils import setup_injection +from dioptra.restapi.shared.request_scope import request @pytest.fixture(scope="session") @@ -169,8 +170,8 @@ def register_test_routes(api: Api, app: Flask) -> None: monkeypatch.setattr(dioptra.restapi.routes, "register_routes", register_test_routes) - app: Flask = create_app(env="test", inject_dependencies=False) - FlaskInjector(app=app, modules=dependency_modules) + injector = Injector(dependency_modules) + app = create_app(env="test", injector=injector) return app diff --git a/tests/unit/task_plugins/dioptra_builtins/artifacts/test_mlflow.py b/tests/unit/task_plugins/dioptra_builtins/artifacts/test_mlflow.py index 709ffee57..a1973081e 100644 --- a/tests/unit/task_plugins/dioptra_builtins/artifacts/test_mlflow.py +++ 
b/tests/unit/task_plugins/dioptra_builtins/artifacts/test_mlflow.py @@ -16,7 +16,6 @@ # https://creativecommons.org/licenses/by/4.0/legalcode from __future__ import annotations -import os import uuid from copy import deepcopy from pathlib import Path @@ -48,7 +47,7 @@ def download_artifacts( ) -> str: if dst_path is None: dst_path = "tmp_unit_test" - dst_path = os.path.abspath(dst_path) + dst_path = str(Path(dst_path).absolute()) dst_local_path = dst_path return dst_local_path @@ -73,7 +72,7 @@ def create_run(self, experiment_id, start_time=56020, tags=None, run_name=None): if not run_name_tag: tags.append(RunTag(key=MLFLOW_RUN_NAME, value=run_name)) run_uuid = uuid.uuid4().hex - artifact_uri = os.path.join("/path/to/artifacts/", run_uuid, "artifacts") + artifact_uri = Path("/path/to/artifacts/") / run_uuid / "artifacts" run_info = RunInfo( run_uuid=run_uuid, @@ -157,7 +156,7 @@ def test_download_all_artifacts_in_run( dst_path = download_all_artifacts_in_run(run_id, artifact_path, destination_path) assert isinstance(dst_path, str) - assert destination_path == os.path.relpath(dst_path) + assert Path(destination_path) == Path(dst_path).relative_to(Path.cwd()) @pytest.mark.parametrize( @@ -199,10 +198,8 @@ def test_upload_data_frame_artifact( upload_data_frame_artifact(data_frame, file_name, file_format, None, working_dir) - pwd = "." if working_dir is None else working_dir - assert os.path.isfile( - Path(os.path.abspath(pwd)) / Path(file_name).with_suffix("." + output) - ) + pwd = Path("." if working_dir is None else working_dir).absolute() + assert (pwd / Path(file_name).with_suffix(f".{output}")).is_file() @pytest.mark.parametrize( @@ -235,8 +232,8 @@ def test_upload_directory_as_tarball_artifact( upload_directory_as_tarball_artifact( source_dir, tarball_filename, tarball_write_mode, working_dir ) - pwd = "." if working_dir is None else working_dir - assert os.path.isfile(Path(os.path.abspath(pwd)) / Path(tarball_filename)) + pwd = Path("." 
if working_dir is None else working_dir).absolute() + assert (pwd / tarball_filename).is_file() @pytest.mark.parametrize( diff --git a/tox.ini b/tox.ini index 72ee986db..391076411 100644 --- a/tox.ini +++ b/tox.ini @@ -279,18 +279,18 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py310-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py39-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py310-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py39-win-x86_64: pip-compile --output-file 
"venvs{/}win-amd64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py310-win-x86_64: pip-compile --output-file "venvs{/}win-amd64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py39-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py310-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in py39-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in py310-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" 
requirements-dev.in - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py310-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in - py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py310-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in + py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.10-requirements-dev.txt" --resolver=backtracking --extra dev --extra examples 
--extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in [testenv:py{310,39}-{win,macos,linux}-{x86_64,aarch64}-requirements-dev-pytorch] deps = @@ -298,18 +298,18 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py310-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py39-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py310-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra 
sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py39-win-x86_64: pip-compile --output-file "venvs{/}win-amd64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py310-win-x86_64: pip-compile --output-file "venvs{/}win-amd64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py39-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py310-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in py39-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.9-requirements-dev-pytorch.txt" 
--resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in py310-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py310-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in - py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py310-linux-x86_64: 
pip-compile --output-file "venvs{/}linux-amd64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in + py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.10-requirements-dev-pytorch.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-pytorch.in [testenv:py{310,39}-{win,macos,linux}-{x86_64,aarch64}-requirements-dev-tensorflow] deps = @@ -317,18 +317,18 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py310-win-x86_64: pip-compile --output-file "venvs{/}win-x86_64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py39-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in 
requirements-dev-tensorflow.in - py310-win-aarch64: pip-compile --output-file "venvs{/}win-aarch64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-x86_64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py39-win-x86_64: pip-compile --output-file "venvs{/}win-amd64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py310-win-x86_64: pip-compile --output-file "venvs{/}win-amd64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py39-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py310-win-aarch64: pip-compile --output-file "venvs{/}win-arm64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra 
sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py39-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py310-macos-x86_64: pip-compile --output-file "venvs{/}macos-amd64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in py39-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in py310-macos-aarch64: pip-compile --output-file "venvs{/}macos-arm64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py310-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py39-linux-aarch64: pip-compile --output-file 
"venvs{/}linux-aarch64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in - py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py310-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in + py310-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.10-requirements-dev-tensorflow.txt" --resolver=backtracking --extra dev --extra examples --extra sdk --extra taskplugins --extra cookiecutter --upgrade --verbose "pyproject.toml" requirements-dev.in requirements-dev-tensorflow.in [testenv:py39-linux-{x86_64,aarch64}-mlflow-tracking-requirements] deps = @@ -336,8 +336,8 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' 
commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-mlflow-tracking-requirements.txt" --resolver=backtracking --upgrade --verbose "docker{/}pip-tools{/}mlflow-tracking-requirements.in" - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-mlflow-tracking-requirements.txt" --resolver=backtracking --upgrade --verbose "docker{/}pip-tools{/}mlflow-tracking-requirements.in" + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-mlflow-tracking-requirements.txt" --resolver=backtracking --upgrade --verbose "docker{/}pip-tools{/}mlflow-tracking-requirements.in" + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-mlflow-tracking-requirements.txt" --resolver=backtracking --upgrade --verbose "docker{/}pip-tools{/}mlflow-tracking-requirements.in" [testenv:py39-linux-{x86_64,aarch64}-restapi-requirements] deps = @@ -345,8 +345,8 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-restapi-requirements.txt" --resolver=backtracking --upgrade --verbose "pyproject.toml" "docker{/}pip-tools{/}restapi-requirements.in" - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-restapi-requirements.txt" --resolver=backtracking --upgrade --verbose "pyproject.toml" "docker{/}pip-tools{/}restapi-requirements.in" + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-restapi-requirements.txt" --resolver=backtracking --upgrade --verbose "pyproject.toml" "docker{/}pip-tools{/}restapi-requirements.in" + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-restapi-requirements.txt" --resolver=backtracking --upgrade --verbose "pyproject.toml" "docker{/}pip-tools{/}restapi-requirements.in" [testenv:py39-linux-{x86_64,aarch64}-tensorflow2-cpu-requirements] deps = @@ -354,8 +354,8 @@ 
deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-tensorflow2-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-tensorflow.in" "docker{/}pip-tools{/}worker-requirements.in" - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-tensorflow2-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-tensorflow.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-tensorflow2-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-tensorflow.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-tensorflow2-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-tensorflow.in" "docker{/}pip-tools{/}worker-requirements.in" [testenv:py39-linux-x86_64-tensorflow2-gpu-requirements] deps = @@ -363,7 +363,7 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-tensorflow2-gpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-tensorflow-gpu.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-tensorflow2-gpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" 
"requirements-dev-tensorflow-gpu.in" "docker{/}pip-tools{/}worker-requirements.in" [testenv:py39-linux-{x86_64,aarch64}-pytorch-cpu-requirements] deps = @@ -371,8 +371,8 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-pytorch-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch.in" "docker{/}pip-tools{/}worker-requirements.in" - py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-aarch64-py3.9-pytorch-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-amd64-py3.9-pytorch-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-aarch64: pip-compile --output-file "venvs{/}linux-arm64-py3.9-pytorch-cpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch.in" "docker{/}pip-tools{/}worker-requirements.in" [testenv:py39-linux-x86_64-pytorch-gpu-requirements] deps = @@ -380,7 +380,7 @@ deps = skip_install = true commands_pre = python -c 'from pathlib import Path;Path("{tox_root}", "venvs").mkdir(exist_ok=True)' commands = - py39-linux-x86_64: pip-compile --output-file "venvs{/}linux-x86_64-py3.9-pytorch-gpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch-gpu.in" "docker{/}pip-tools{/}worker-requirements.in" + py39-linux-x86_64: pip-compile --output-file 
"venvs{/}linux-amd64-py3.9-pytorch-gpu-requirements.txt" --resolver=backtracking --extra sdk --extra taskplugins --upgrade --verbose "pyproject.toml" "requirements-dev-pytorch-gpu.in" "docker{/}pip-tools{/}worker-requirements.in" [testenv:rstcheck] deps =