From 922651259f3297c13478d5c674a88f565ee83bf5 Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:52 +0200 Subject: [PATCH 01/32] Updated to ruff>=0.6.3 (from ruff==0.5.1) --- .github/workflows/_code_quality.yml | 4 ++-- requirements-dev.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/_code_quality.yml b/.github/workflows/_code_quality.yml index a5dd412f..d876cd41 100644 --- a/.github/workflows/_code_quality.yml +++ b/.github/workflows/_code_quality.yml @@ -19,7 +19,7 @@ jobs: uv pip install --system -r requirements.txt - name: Install ruff run: | - uv pip install --system ruff==0.5.1 + uv pip install --system ruff>=0.6.3 - name: Run ruff format run: ruff format --diff . @@ -39,7 +39,7 @@ jobs: uv pip install --system -r requirements.txt - name: Install ruff run: | - uv pip install --system ruff==0.5.1 + uv pip install --system ruff>=0.6.3 - name: Run ruff check run: ruff check --diff . diff --git a/requirements-dev.txt b/requirements-dev.txt index 39332668..87e857e1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,6 @@ pytest>=8.2 pytest-cov>=5.0 -ruff==0.5.1 +ruff>=0.6.3 pyright==1.1.371 Sphinx>=7.3 sphinx-argparse-cli>=1.16 From 1e2427df70e632b88823b406a2ebfd7b9a84f4c6 Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:52 +0200 Subject: [PATCH 02/32] Updated to pyright>=1.1.378 (from pyright==1.1.371) --- .github/workflows/_code_quality.yml | 2 +- requirements-dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/_code_quality.yml b/.github/workflows/_code_quality.yml index d876cd41..da304719 100644 --- a/.github/workflows/_code_quality.yml +++ b/.github/workflows/_code_quality.yml @@ -60,6 +60,6 @@ jobs: uv pip install --system pytest - name: Install pyright run: | - uv pip install --system pyright==1.1.371 + uv pip install --system pyright>=1.1.378 - name: Run pyright run: pyright . 
diff --git a/requirements-dev.txt b/requirements-dev.txt index 87e857e1..2ee2247d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ pytest>=8.2 pytest-cov>=5.0 ruff>=0.6.3 -pyright==1.1.371 +pyright>=1.1.378 Sphinx>=7.3 sphinx-argparse-cli>=1.16 myst-parser>=3.0 From bb887aa64e6674438417a4c111e45a277600c11d Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:53 +0200 Subject: [PATCH 03/32] Updated to sourcery>=1.22 (from sourcery==1.21) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 2ee2247d..0145eabf 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ Sphinx>=7.3 sphinx-argparse-cli>=1.16 myst-parser>=3.0 furo>=2024.5 -sourcery==1.21 +sourcery>=1.22 -r requirements.txt -r requirements-types.txt From c0fb6c093da2e0315c32bd184f3400163fb6b346 Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:58 +0200 Subject: [PATCH 04/32] Updated to pytest>=8.3 (from pytest>=8.2) --- requirements-dev.txt | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0145eabf..dd6660b4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -pytest>=8.2 +pytest>=8.3 pytest-cov>=5.0 ruff>=0.6.3 pyright>=1.1.378 diff --git a/tox.ini b/tox.ini index ab6c2e4e..52296a52 100644 --- a/tox.ini +++ b/tox.ini @@ -21,7 +21,7 @@ skip_covered = True [testenv] system_site_packages = True deps = - pytest>=8.2 + pytest>=8.3 pytest-cov>=5.0 commands = pytest --cov --cov-config tox.ini {posargs} From 276bc683d8d9a7a3f7bdace033f5c8aa4d2404bf Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:59 +0200 Subject: [PATCH 05/32] Updated to Sphinx>=8.0 (from Sphinx>=7.3) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index dd6660b4..cda7e138 100644 
--- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ pytest>=8.3 pytest-cov>=5.0 ruff>=0.6.3 pyright>=1.1.378 -Sphinx>=7.3 +Sphinx>=8.0 sphinx-argparse-cli>=1.16 myst-parser>=3.0 furo>=2024.5 From c2a49ac7809f86594d5f44262478cb656197da33 Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:06:59 +0200 Subject: [PATCH 06/32] Updated to sphinx-argparse-cli>=1.17 (from sphinx-argparse-cli>=1.16) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index cda7e138..0983a474 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,7 +3,7 @@ pytest-cov>=5.0 ruff>=0.6.3 pyright>=1.1.378 Sphinx>=8.0 -sphinx-argparse-cli>=1.16 +sphinx-argparse-cli>=1.17 myst-parser>=3.0 furo>=2024.5 sourcery>=1.22 From 52f67da961fb9082d39eed102631b3e9a47e0c0c Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:07:00 +0200 Subject: [PATCH 07/32] Updated to myst-parser>=4.0 (from myst-parser>=3.0) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0983a474..c533caae 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,7 +4,7 @@ ruff>=0.6.3 pyright>=1.1.378 Sphinx>=8.0 sphinx-argparse-cli>=1.17 -myst-parser>=3.0 +myst-parser>=4.0 furo>=2024.5 sourcery>=1.22 From 34aa3fca468e7d5b301d3745f755c77b840fe83f Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:07:01 +0200 Subject: [PATCH 08/32] Updated to furo>=2024.8 (from furo>=2024.5) --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index c533caae..bf43243c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ pyright>=1.1.378 Sphinx>=8.0 sphinx-argparse-cli>=1.17 myst-parser>=4.0 -furo>=2024.5 +furo>=2024.8 sourcery>=1.22 -r requirements.txt From c57b8eb79b29b2448d5943aad777c1e3ed857822 Mon Sep 17 
00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:07:04 +0200 Subject: [PATCH 09/32] GitHub workflows: Install dependencies: change from 'pip install' to 'uv pip install' --- .../workflows/_build_and_publish_documentation.yml | 2 +- .github/workflows/_build_package.yml | 2 +- .github/workflows/_code_quality.yml | 14 +++++++------- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/_build_and_publish_documentation.yml b/.github/workflows/_build_and_publish_documentation.yml index 487f638a..610d57e9 100644 --- a/.github/workflows/_build_and_publish_documentation.yml +++ b/.github/workflows/_build_and_publish_documentation.yml @@ -26,7 +26,7 @@ jobs: python -m pip install uv - name: Install dependencies run: | - uv pip install --system -r requirements-dev.txt + uv uv pip install --system --system -r requirements-dev.txt - name: Print debugging information run: | echo "github.ref:" ${{github.ref}} diff --git a/.github/workflows/_build_package.yml b/.github/workflows/_build_package.yml index 85ba25df..8987b25f 100644 --- a/.github/workflows/_build_package.yml +++ b/.github/workflows/_build_package.yml @@ -19,7 +19,7 @@ jobs: python -m pip install uv - name: Install build and twine run: | - uv pip install --system build twine + uv uv pip install --system --system build twine - name: Run build run: python -m build - name: Run twine check diff --git a/.github/workflows/_code_quality.yml b/.github/workflows/_code_quality.yml index da304719..440bff99 100644 --- a/.github/workflows/_code_quality.yml +++ b/.github/workflows/_code_quality.yml @@ -16,10 +16,10 @@ jobs: python -m pip install uv - name: Install dependencies run: | - uv pip install --system -r requirements.txt + uv uv pip install --system --system -r requirements.txt - name: Install ruff run: | - uv pip install --system ruff>=0.6.3 + uv uv pip install --system --system ruff>=0.6.3 - name: Run ruff format run: ruff format --diff . 
@@ -36,10 +36,10 @@ jobs: python -m pip install uv - name: Install dependencies run: | - uv pip install --system -r requirements.txt + uv uv pip install --system --system -r requirements.txt - name: Install ruff run: | - uv pip install --system ruff>=0.6.3 + uv uv pip install --system --system ruff>=0.6.3 - name: Run ruff check run: ruff check --diff . @@ -56,10 +56,10 @@ jobs: python -m pip install uv - name: Install dependencies run: | - uv pip install --system -r requirements.txt - uv pip install --system pytest + uv uv pip install --system --system -r requirements.txt + uv uv pip install --system --system pytest - name: Install pyright run: | - uv pip install --system pyright>=1.1.378 + uv uv pip install --system --system pyright>=1.1.378 - name: Run pyright run: pyright . From 5399a39dacbd7bcd6d2e958d5074b6ba242b286a Mon Sep 17 00:00:00 2001 From: Claas Date: Sat, 5 Oct 2024 22:07:04 +0200 Subject: [PATCH 10/32] updated CHANGELOG.md --- CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef97cc9e..a856005a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,17 @@ The changelog format is based on [Keep a Changelog](https://keepachangelog.com/e ## [Unreleased] +### Dependencies +* Updated to ruff>=0.6.3 (from ruff==0.5.1) +* Updated to pyright>=1.1.378 (from pyright==1.1.371) +* Updated to sourcery>=1.22 (from sourcery==1.21) +* Updated to pytest>=8.3 (from pytest>=8.2) +* Updated to Sphinx>=8.0 (from Sphinx>=7.3) +* Updated to sphinx-argparse-cli>=1.17 (from sphinx-argparse-cli>=1.16) +* Updated to myst-parser>=4.0 (from myst-parser>=3.0) +* Updated to furo>=2024.8 (from furo>=2024.5) +* GitHub workflows: Install dependencies: change from 'pip install' to 'uv pip install' + ### Dependencies * Updated to download-artifact@v4 (from download-artifact@v3) From fdd683ec44fa1c2cc0cf0e551e82c3bbe1245240 Mon Sep 17 00:00:00 2001 From: Claas Date: Sun, 6 Oct 2024 22:22:06 +0200 Subject: [PATCH 11/32] updated 
all project related files with latest changes from python_project_template (files in root folder, .github, .vscode, as well as selected files in docs and tests) --- .coveragerc | 13 + .gitattributes | 32 + .../_build_and_publish_documentation.yml | 25 +- .github/workflows/_build_package.yml | 56 +- .github/workflows/_code_quality.yml | 139 +-- .github/workflows/_publish_package.yml | 2 +- .github/workflows/_test.yml | 25 +- .github/workflows/_test_future.yml | 25 +- .gitignore | 36 +- .pre-commit-config.yaml | 20 + .sourcery.yaml | 143 ++- .vscode/extensions.json | 1 + .vscode/launch.json | 870 ++++++++--------- .vscode/settings.json | 101 +- CHANGELOG.md | 899 +++++++++--------- README.md | 312 +++--- STYLEGUIDE.md | 847 ++++++++--------- demos/folder_for_demos.py | 1 + docs/source/_templates/custom-class.rst | 31 + docs/source/_templates/custom-module.rst | 66 ++ docs/source/api.rst | 16 +- docs/source/conf.py | 148 +-- pyproject.toml | 331 ++++--- pytest.ini | 10 +- qa.bat | 9 +- requirements-dev.txt | 12 - requirements-types.txt | 1 - requirements.txt | 9 - ruff.toml | 149 +++ tests/.gitignore | 2 + tests/conftest.py | 162 ++-- 31 files changed, 2421 insertions(+), 2072 deletions(-) create mode 100644 .coveragerc create mode 100644 .gitattributes create mode 100644 .pre-commit-config.yaml create mode 100644 demos/folder_for_demos.py create mode 100644 docs/source/_templates/custom-class.rst create mode 100644 docs/source/_templates/custom-module.rst delete mode 100644 requirements-dev.txt delete mode 100644 requirements-types.txt delete mode 100644 requirements.txt create mode 100644 ruff.toml diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..7289f881 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,13 @@ +[paths] +source = + src/ospx + */site-packages/ospx + +[run] +source = ospx +branch = True + +[report] +fail_under = 10.0 +show_missing = True +skip_covered = True diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 
00000000..ca69e9d4 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,32 @@ +# Set the default behavior, in case people don't have core.autocrlf set. +* text=auto + +# Explicitly declare text files you want to always be normalized and converted +# to native line endings on checkout. +*.py text +*.cpp text +*.hpp text +*.c text +*.h text +*.json text +*.xml text +*.txt text +*.yml text +*.yaml text +*.toml text +*.rst text +*.ini text + +# Declare files that will always have CRLF line endings on checkout. +*.vcproj text eol=crlf +*.sln text eol=crlf +*.md text eol=crlf + +# Declare files that will always have LF line endings on checkout. +*.sh text eol=lf + +# Declare files that will not be normalized regardless of their content. +*.jpg -text +*.png -text +*.gif -text +*.ico -text diff --git a/.github/workflows/_build_and_publish_documentation.yml b/.github/workflows/_build_and_publish_documentation.yml index 610d57e9..2fed60c7 100644 --- a/.github/workflows/_build_and_publish_documentation.yml +++ b/.github/workflows/_build_and_publish_documentation.yml @@ -15,18 +15,18 @@ jobs: - name: Checkout active branch uses: actions/checkout@v4 with: - fetch-depth: 1 lfs: true - - name: Install Python + - name: Install uv + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.11' - - name: Install uv - run: | - python -m pip install uv - - name: Install dependencies - run: | - uv uv pip install --system --system -r requirements-dev.txt + python-version-file: "pyproject.toml" + - name: Install the project + run: uv sync --upgrade - name: Print debugging information run: | echo "github.ref:" ${{github.ref}} @@ -38,16 +38,15 @@ jobs: git branch git branch -a git remote -v - python -V - pip list --not-required - pip list + uv run python -V + uv pip list # Build documentation - uses: sphinx-doc/github-problem-matcher@master - name: Build documentation run: | cd docs 
- make html + uv run make html - name: Clone and cleanup gh-pages branch run: | diff --git a/.github/workflows/_build_package.yml b/.github/workflows/_build_package.yml index 8987b25f..362d3aaa 100644 --- a/.github/workflows/_build_package.yml +++ b/.github/workflows/_build_package.yml @@ -1,29 +1,29 @@ -name: Build Package - -on: workflow_call - -jobs: - build: - name: Build source distribution - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 1 - lfs: true - - uses: actions/setup-python@v5 - with: - python-version: '3.11' +name: Build Package + +on: workflow_call + +jobs: + build: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + lfs: true - name: Install uv - run: | - python -m pip install uv - - name: Install build and twine - run: | - uv uv pip install --system --system build twine - - name: Run build - run: python -m build - - name: Run twine check - run: twine check --strict dist/* - - uses: actions/upload-artifact@v4 - with: - path: ./dist/*.tar.gz + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Build source distribution and wheel + run: uv build + - name: Run twine check + run: uvx twine check --strict dist/* + - uses: actions/upload-artifact@v4 + with: + path: | + dist/*.tar.gz + dist/*.whl diff --git a/.github/workflows/_code_quality.yml b/.github/workflows/_code_quality.yml index 440bff99..d3e65a71 100644 --- a/.github/workflows/_code_quality.yml +++ b/.github/workflows/_code_quality.yml @@ -1,65 +1,80 @@ -name: Code Quality - -on: workflow_call - -jobs: - ruff_format: - runs-on: ubuntu-latest - name: ruff format - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.11' +name: Code Quality + +on: workflow_call + +jobs: + ruff_format: + runs-on: ubuntu-latest + name: ruff format 
+ steps: + - uses: actions/checkout@v4 - name: Install uv - run: | - python -m pip install uv - - name: Install dependencies - run: | - uv uv pip install --system --system -r requirements.txt - - name: Install ruff - run: | - uv uv pip install --system --system ruff>=0.6.3 - - name: Run ruff format - run: ruff format --diff . - - ruff_check: - runs-on: ubuntu-latest - name: ruff check - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.11' + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Install the project + run: uv sync --upgrade + - name: Run ruff format + run: uv run ruff format --diff + + ruff_check: + runs-on: ubuntu-latest + name: ruff check + steps: + - uses: actions/checkout@v4 - name: Install uv - run: | - python -m pip install uv - - name: Install dependencies - run: | - uv uv pip install --system --system -r requirements.txt - - name: Install ruff - run: | - uv uv pip install --system --system ruff>=0.6.3 - - name: Run ruff check - run: ruff check --diff . 
- - pyright: - runs-on: ubuntu-latest - name: pyright - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: '3.11' + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Install the project + run: uv sync --upgrade + - name: Run ruff check + run: uv run ruff check --diff + + pyright: + runs-on: ubuntu-latest + name: pyright + steps: + - uses: actions/checkout@v4 - name: Install uv - run: | - python -m pip install uv - - name: Install dependencies - run: | - uv uv pip install --system --system -r requirements.txt - uv uv pip install --system --system pytest - - name: Install pyright - run: | - uv uv pip install --system --system pyright>=1.1.378 - - name: Run pyright - run: pyright . + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Install the project + run: uv sync --upgrade + - name: Run pyright + run: uv run pyright + + mypy: + runs-on: ubuntu-latest + name: mypy + steps: + - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version-file: "pyproject.toml" + - name: Install the project + run: uv sync --upgrade + - name: Run mypy + run: uv run mypy diff --git a/.github/workflows/_publish_package.yml b/.github/workflows/_publish_package.yml index 2fcae45f..0779a17c 100644 --- a/.github/workflows/_publish_package.yml +++ b/.github/workflows/_publish_package.yml @@ -13,5 +13,5 @@ jobs: - uses: actions/download-artifact@v4 with: name: artifact - path: ./dist/ + path: dist - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/_test.yml 
b/.github/workflows/_test.yml index bbf6accb..3dc9a75b 100644 --- a/.github/workflows/_test.yml +++ b/.github/workflows/_test.yml @@ -4,31 +4,34 @@ on: workflow_call jobs: test: - name: Test on ${{matrix.python.toxenv}}-${{matrix.platform.toxenv}} + name: Test on ${{matrix.python.version}}-${{matrix.platform.runner}} runs-on: ${{ matrix.platform.runner }} strategy: matrix: platform: - runner: ubuntu-latest - toxenv: linux - runner: windows-latest - toxenv: windows + - runner: macos-latest python: - - version: '3.9' - toxenv: 'py39' - version: '3.10' - toxenv: 'py310' - version: '3.11' - toxenv: 'py311' - version: '3.12' - toxenv: 'py312' steps: - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" - name: Install Python ${{ matrix.python.version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python.version }} - - name: Install tox - run: python -m pip install tox-uv + - name: Install the project + run: uv sync --upgrade -p ${{ matrix.python.version }} --no-dev + - name: Install pytest + run: | + uv pip install pytest + uv pip install pytest-cov - name: Run pytest - run: tox -e ${{matrix.python.toxenv}}-${{matrix.platform.toxenv}} + run: uv run pytest --cov diff --git a/.github/workflows/_test_future.yml b/.github/workflows/_test_future.yml index 28ede257..0b46eb52 100644 --- a/.github/workflows/_test_future.yml +++ b/.github/workflows/_test_future.yml @@ -1,30 +1,37 @@ -name: Unit Tests (py312) -# Test also with Python 3.12 (experimental; workflow will not fail on error.) +name: Unit Tests (py313) +# Test also with Python 3.13 (experimental; workflow will not fail on error.) 
on: workflow_call jobs: test313: - name: Test on ${{matrix.python.toxenv}}-${{matrix.platform.toxenv}} (experimental) + name: Test on ${{matrix.python.version}}-${{matrix.platform.runner}} (experimental) continue-on-error: true runs-on: ${{ matrix.platform.runner }} strategy: matrix: platform: - runner: ubuntu-latest - toxenv: linux - runner: windows-latest - toxenv: windows python: - version: '3.13.0-alpha - 3.13.0' - toxenv: 'py313' + uvpy: '3.13' steps: - uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v2 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" - name: Install Python ${{ matrix.python.version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python.version }} - - name: Install tox - run: python -m pip install tox-uv + - name: Install the project + run: uv sync --upgrade -p ${{ matrix.python.uvpy }} --no-dev + - name: Install pytest + run: | + uv pip install pytest + uv pip install pytest-cov - name: Run pytest - run: tox -e ${{matrix.python.toxenv}}-${{matrix.platform.toxenv}} + run: uv run pytest --cov diff --git a/.gitignore b/.gitignore index ba7ab27e..e353c084 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ coverage.xml *.py,cover .hypothesis/ .pytest_cache/ +cover/ # Translations *.mo @@ -70,8 +71,10 @@ instance/ # Sphinx documentation docs/_build/ +_autosummary # PyBuilder +.pybuilder/ target/ # Jupyter Notebook @@ -84,14 +87,14 @@ ipython_config.py # pyenv .python-version -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# uv +# It is generally recommended to include `uv.lock` in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. 
-#Pipfile.lock +# having no cross-platform support, `uv` might install dependencies in one environment that don't work in another. +# In such case, `uv.lock` should be added to `.gitignore` +uv.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff @@ -128,13 +131,19 @@ dmypy.json # Pyre type checker .pyre/ +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Ruff +.ruff_cache + # PyCharm .idea -# modules -modules.txt - -# VS Code Settings +# VS Code .vscode/* !.vscode/settings.json !.vscode/tasks.json @@ -142,3 +151,10 @@ modules.txt !.vscode/extensions.json !.vscode/*.code-snippets +# Inside /demos folder: ignore temporary logs, db's and local data files +demos/**/*.log +demos/**/*.db +demos/**/*.nc + +# modules +modules.txt \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..b054a6fc --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,20 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: mixed-line-ending + fix: auto + - id: trailing-whitespace + - id: check-yaml + - id: check-toml + - id: check-merge-conflict + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.2 + hooks: + - id: ruff + - id: ruff-format + # - repo: https://github.com/pre-commit/mirrors-mypy + # rev: v1.9.0 + # hooks: + # - id: mypy + # exclude: tests/ diff --git a/.sourcery.yaml b/.sourcery.yaml index 88f5126f..a4019dfb 100644 --- a/.sourcery.yaml +++ b/.sourcery.yaml @@ -1,72 +1,71 @@ -# 🪄 This is your project's Sourcery configuration file. - -# You can use it to get Sourcery working in the way you want, such as -# ignoring specific refactorings, skipping directories in your project, -# or writing custom rules. 
- -# 📚 For a complete reference to this file, see the documentation at -# https://docs.sourcery.ai/Configuration/Project-Settings/ - -# This file was auto-generated by Sourcery on 2022-11-24 at 15:30. - -version: '1' # The schema version of this config file - -ignore: # A list of paths or files which Sourcery will ignore. -- .git -- .venv -- .tox -- build -- dist -- __pycache__ -- tests/library/spring_mass_damper/Damper -- tests/library/spring_mass_damper/Mass -- tests/library/spring_mass_damper/Spring - -rule_settings: - enable: - - default - disable: # A list of rule IDs Sourcery will never suggest. - - inline-immediately-returned-variable - rule_types: - - refactoring - - suggestion - - comment - python_version: '3.9' # A string specifying the lowest Python version your project supports. Sourcery will not suggest refactorings requiring a higher Python version. - -metrics: - quality_threshold: 20.0 # flag low-code-quality for functions and methods with a quality score below 20 % - -# rules: # A list of custom rules Sourcery will include in its analysis. -# - id: no-print-statements -# description: Do not use print statements in the test directory. -# pattern: print(...) -# language: python -# replacement: -# condition: -# explanation: -# paths: -# include: -# - test -# exclude: -# - conftest.py -# tests: [] -# tags: [] - -# rule_tags: {} # Additional rule tags. - -# github: -# labels: [] -# ignore_labels: -# - sourcery-ignore -# request_review: author -# sourcery_branch: sourcery/{base_branch} - -# clone_detection: -# min_lines: 3 -# min_duplicates: 2 -# identical_clones_only: false - -# proxy: -# url: -# ssl_certs_file: -# no_ssl_verify: false +# 🪄 This is your project's Sourcery configuration file. + +# You can use it to get Sourcery working in the way you want, such as +# ignoring specific refactorings, skipping directories in your project, +# or writing custom rules. 
+ +# 📚 For a complete reference to this file, see the documentation at +# https://docs.sourcery.ai/Configuration/Project-Settings/ + +# This file was auto-generated by Sourcery on 2023-02-22 at 11:42. + +version: '1' # The schema version of this config file + +ignore: # A list of paths or files which Sourcery will ignore. + - .git + - .venv + - .tox + - dist + - __pycache__ + - tests/library/spring_mass_damper/Damper + - tests/library/spring_mass_damper/Mass + - tests/library/spring_mass_damper/Spring + +rule_settings: + enable: + - default + disable: # A list of rule IDs Sourcery will never suggest. + - inline-immediately-returned-variable + rule_types: + - refactoring + - suggestion + - comment + python_version: '3.10' # A string specifying the lowest Python version your project supports. Sourcery will not suggest refactorings requiring a higher Python version. + +# rules: # A list of custom rules Sourcery will include in its analysis. +# - id: no-print-statements +# description: Do not use print statements in the test directory. +# pattern: print(...) +# language: python +# replacement: +# condition: +# explanation: +# paths: +# include: +# - test +# exclude: +# - conftest.py +# tests: [] +# tags: [] + +# rule_tags: {} # Additional rule tags. + +metrics: + quality_threshold: 20.0 + +# github: +# labels: [] +# ignore_labels: +# - sourcery-ignore +# request_review: author +# sourcery_branch: sourcery/{base_branch} + +# clone_detection: +# min_lines: 3 +# min_duplicates: 2 +# identical_clones_only: false + +# proxy: +# url: +# ssl_certs_file: +# no_ssl_verify: false diff --git a/.vscode/extensions.json b/.vscode/extensions.json index a4d80c37..2cb66f60 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -11,6 +11,7 @@ "sourcery.sourcery", "njpwerner.autodocstring", "editorconfig.editorconfig", + "ms-python.mypy-type-checker", ], // List of extensions recommended by VS Code that should not be recommended for users of this workspace. 
"unwantedRecommendations": [] diff --git a/.vscode/launch.json b/.vscode/launch.json index d5506a3c..49e7c187 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,449 +1,423 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "Python: Debug Tests", - "type": "python", - "request": "launch", - "program": "${file}", - "purpose": [ - "debug-test" - ], - "console": "integratedTerminal", - "env": { - "PYTEST_ADDOPTS": "--no-cov" - }, - "autoReload": { - "enable": true - }, - "justMyCode": false, - }, - { - "name": "Python: Current File, cwd = file dir, envFile", - "type": "python", - "request": "launch", - "cwd": "${fileDirname}", // working dir = dir where current file is - "program": "${file}", - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "Python: Current File, cwd = workspace root folder, envFile", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}", // working dir = workspace (mvx) dir - "program": "${file}", - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder test_caseDict --inspect", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict", - "--inspect", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder test_caseDict_minimal_inspect --inspect", - "type": 
"python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict_minimal_inspect", - "--inspect", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure test_import_OspSystemStructure.xml", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "test_import_OspSystemStructure.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder test_caseDict_imported_test_graph", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict_imported_test_graph", - "--graph", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder test_caseDict --graph", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict", - "--graph", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "watchCosim test_caseDict", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", - "args": [ - 
"watchDict", - "-pd", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder test_caseDict_minimal_set_int_for_real", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\test_dicts", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict_minimal_set_int_for_real", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder house", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\house", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder variable_group --inspect", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict", - "--inspect", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure variable_group OspSystemStructure.xml", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "OspSystemStructure.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": 
"ospCaseBuilder variable_group", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "watchCosim variable_group", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", - "args": [ - "watchDict", - "-pd", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure variable_group OspSystemStructure_original.xml", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "OspSystemStructure_original.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder variable_group --inspect multiple_connection_types", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict_multiple_connection_types", - "--inspect", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder variable_group multiple_connection_types", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\variable_groups", - "program": 
"${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict_multiple_connection_types", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure gunnerus-dp/control-system OspSystemStructure.xml (cwd=gunnerus-dp)", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "./config/control-system/OspSystemStructure.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure gunnerus-dp/control-system OspSystemStructure.xml (cwd=control-system)", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\gunnerus-dp\\config\\control-system", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "OspSystemStructure.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder gunnerus-dp", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder gunnerus-dp --inspect", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "caseDict", - "--inspect", - 
], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "watchCosim gunnerus-dp", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", - "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", - "args": [ - "watchDict", - "-pd", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "importSystemStructure spring_mass_damper", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", - "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", - "args": [ - "test_OspSystemStructure_spring_mass_damper.xml", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder spring_mass_damper --inspect", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict", - "--inspect", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": "ospCaseBuilder spring_mass_damper --graph", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", - "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", - "args": [ - "test_caseDict", - "--graph", - ], - "console": "integratedTerminal", - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - { - "name": 
"watchCosim spring_mass_damper", - "type": "python", - "request": "launch", - "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", - "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", - "args": [ - "watchDict", - "-pd", - ], - "console": "integratedTerminal", - "justMyCode": false, - "autoReload": { - "enable": true - }, - "justMyCode": false, - "envFile": "${workspaceFolder}/.env" // specify where .env file is - }, - ] +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: Debug Tests", + "type": "debugpy", + "request": "launch", + "program": "${file}", + "purpose": [ + "debug-test" + ], + "console": "integratedTerminal", + "env": { + "PYTEST_ADDOPTS": "--no-cov" + }, + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "Python: Current File, cwd = file dir", + "type": "debugpy", + "request": "launch", + "cwd": "${fileDirname}", // working dir = dir where current file is + "program": "${file}", + "console": "integratedTerminal", + "justMyCode": true, + "autoReload": { + "enable": true + }, + }, + { + "name": "Python: Current File, cwd = workspace root folder", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}", // working dir = workspace root folder + "program": "${file}", + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder test_caseDict --inspect", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder 
test_caseDict_minimal_inspect --inspect", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict_minimal_inspect", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure test_import_OspSystemStructure.xml", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "test_import_OspSystemStructure.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder test_caseDict_imported_test_graph", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict_imported_test_graph", + "--graph", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder test_caseDict --graph", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict", + "--graph", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "watchCosim test_caseDict", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", + "args": [ + "watchDict", + "-pd", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder test_caseDict_minimal_set_int_for_real", + "type": "debugpy", + 
"request": "launch", + "cwd": "${workspaceFolder}\\tests\\test_dicts", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict_minimal_set_int_for_real", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder house", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\house", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder variable_group --inspect", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure variable_group OspSystemStructure.xml", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "OspSystemStructure.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder variable_group", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "watchCosim variable_group", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", + "args": [ + "watchDict", 
+ "-pd", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure variable_group OspSystemStructure_original.xml", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "OspSystemStructure_original.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder variable_group --inspect multiple_connection_types", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict_multiple_connection_types", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder variable_group multiple_connection_types", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\variable_groups", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict_multiple_connection_types", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure gunnerus-dp/control-system OspSystemStructure.xml (cwd=gunnerus-dp)", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "./config/control-system/OspSystemStructure.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure gunnerus-dp/control-system OspSystemStructure.xml (cwd=control-system)", + "type": "debugpy", + "request": "launch", + "cwd": 
"${workspaceFolder}\\tests\\gunnerus-dp\\config\\control-system", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "OspSystemStructure.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder gunnerus-dp", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder gunnerus-dp --inspect", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "caseDict", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "watchCosim gunnerus-dp", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\gunnerus-dp", + "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", + "args": [ + "watchDict", + "-pd", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "importSystemStructure spring_mass_damper", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", + "program": "${workspaceFolder}\\src\\ospx\\cli\\importSystemStructure.py", + "args": [ + "test_OspSystemStructure_spring_mass_damper.xml", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder spring_mass_damper --inspect", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + 
"args": [ + "test_caseDict", + "--inspect", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "ospCaseBuilder spring_mass_damper --graph", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", + "program": "${workspaceFolder}\\src\\ospx\\cli\\ospCaseBuilder.py", + "args": [ + "test_caseDict", + "--graph", + ], + "console": "integratedTerminal", + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + { + "name": "watchCosim spring_mass_damper", + "type": "debugpy", + "request": "launch", + "cwd": "${workspaceFolder}\\tests\\spring_mass_damper", + "program": "${workspaceFolder}\\src\\ospx\\watch\\cli\\watchCosim.py", + "args": [ + "watchDict", + "-pd", + ], + "console": "integratedTerminal", + "justMyCode": false, + "autoReload": { + "enable": true + }, + "justMyCode": true, + }, + ] } \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 0d38da2f..f0f4822b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,49 +1,52 @@ -{ - "python.languageServer": "Pylance", - "editor.formatOnSave": true, - "notebook.formatOnSave.enabled": true, - "notebook.codeActionsOnSave": { - "notebook.source.fixAll": true, - "notebook.source.organizeImports": true, - }, - "[python]": { - "editor.formatOnSave": true, - "editor.codeActionsOnSave": { - "source.fixAll": "always", - "source.organizeImports": "always", - }, - "editor.defaultFormatter": "charliermarsh.ruff", - }, - "autoDocstring.docstringFormat": "numpy", - "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true, - "python.analysis.logLevel": "Warning", - "python.analysis.completeFunctionParens": false, - "python.analysis.diagnosticMode": "workspace", - "python.analysis.diagnosticSeverityOverrides": {}, - "python.analysis.indexing": true, - "python.analysis.autoImportCompletions": true, - 
"python.analysis.inlayHints.variableTypes": false, - "python.analysis.inlayHints.functionReturnTypes": false, - "python.analysis.inlayHints.pytestParameters": true, - "python.terminal.executeInFileDir": true, - "python.terminal.activateEnvironment": true, - "python.terminal.activateEnvInCurrentTerminal": false, - "python.analysis.packageIndexDepths": [ - { - "name": "pandas", - "depth": 4, - "includeAllSymbols": true, - }, - { - "name": "matplotlib", - "depth": 4, - "includeAllSymbols": true, - }, - { - "name": "mpl_toolkits", - "depth": 4, - "includeAllSymbols": true, - }, - ], -} +{ + "python.languageServer": "Pylance", + "editor.formatOnSave": true, + "notebook.formatOnSave.enabled": true, + "notebook.codeActionsOnSave": { + "notebook.source.fixAll": "explicit", + "notebook.source.organizeImports": "explicit", + }, + "[python]": { + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "always", + "source.organizeImports": "explicit", + }, + "editor.defaultFormatter": "charliermarsh.ruff", + }, + "autoDocstring.docstringFormat": "numpy", + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.analysis.logLevel": "Warning", + "python.analysis.completeFunctionParens": false, + "python.analysis.diagnosticMode": "workspace", + "python.analysis.indexing": true, + "python.analysis.autoImportCompletions": true, + "python.analysis.inlayHints.variableTypes": false, + "python.analysis.inlayHints.functionReturnTypes": false, + "python.analysis.inlayHints.pytestParameters": true, + "python.terminal.executeInFileDir": true, + "python.terminal.activateEnvironment": true, + "python.terminal.activateEnvInCurrentTerminal": false, + "python.analysis.packageIndexDepths": [ + { + "name": "pandas", + "depth": 4, + "includeAllSymbols": true, + }, + { + "name": "matplotlib", + "depth": 4, + "includeAllSymbols": true, + }, + { + "name": "mpl_toolkits", + "depth": 4, + "includeAllSymbols": true, + }, + ], + 
"mypy-type-checker.importStrategy": "fromEnvironment", + "mypy-type-checker.reportingScope": "workspace", + "mypy-type-checker.preferDaemon": false, + "ruff.configurationPreference": "filesystemFirst", +} \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index a856005a..7dde3703 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,452 +1,447 @@ -# Changelog - -All notable changes to the [ospx] project will be documented in this file.
-The changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [Unreleased] - -### Dependencies -* Updated to ruff>=0.6.3 (from ruff==0.5.1) -* Updated to pyright>=1.1.378 (from pyright==1.1.371) -* Updated to sourcery>=1.22 (from sourcery==1.21) -* Updated to pytest>=8.3 (from pytest>=8.2) -* Updated to Sphinx>=8.0 (from Sphinx>=7.3) -* Updated to sphinx-argparse-cli>=1.17 (from sphinx-argparse-cli>=1.16) -* Updated to myst-parser>=4.0 (from myst-parser>=3.0) -* Updated to furo>=2024.8 (from furo>=2024.5) -* GitHub workflows: Install dependencies: change from 'pip install' to 'uv pip install' - -### Dependencies -* Updated to download-artifact@v4 (from download-artifact@v3) - -### Dependencies -* GitHub workflows: Replaced pip install tox with pip install tox-uv -* GitHub workflows: Removed cache: 'pip' for tox-uv compatibility -* GitHub workflows: Install dependencies: change singleline run statements to multiline run statements -* GitHub workflows: Add step to install 'uv' package -* GitHub workflows: Add step to install 'uv' package -* GitHub workflows: Install dependencies: change from 'pip install' to 'uv pip install' -* GitHub workflow _test_future.yml : updated Python version to 3.13.0-alpha - 3.13.0 -* GitHub workflow _test_future.yml : updated name of test job to 'test313' - -### Dependencies -* updated to black[jupyter]==24.4 (from black[jupyter]==23.12) -* updated to version: '==24.4' (from version: '==23.12') -* updated to ruff==0.5.1 (from ruff==0.4.2) -* updated to pyright==1.1.371 (from pyright==1.1.360) -* updated to setup-python@v5 (from setup-python@v4) -* updated to actions-gh-pages@v4 (from actions-gh-pages@v3) -* updated to upload-artifact@v4 (from upload-artifact@v3) -* updated to sourcery==1.21 (from sourcery==1.16) -* updated to dictIO>=0.3.4 (from dictIO>=0.3.3) -* updated to checkout@v4 (from checkout@v3) - - -## [0.2.14] - 2024-05-22 - -### Dependencies -* updated to ruff==0.4.2 (from ruff==0.2.1) -* 
updated to pyright==1.1.360 (from pyright==1.1.350) -* updated to sourcery==1.16 (from sourcery==1.15) -* updated to lxml>=5.2 (from lxml>=5.1) -* updated to types-lxml>=2024.4 (from types-lxml>=5.1) -* updated to pytest>=8.2 (from pytest>=7.4) -* updated to pytest-cov>=5.0 (from pytest-cov>=4.1) -* updated to Sphinx>=7.3 (from Sphinx>=7.2) -* updated to sphinx-argparse-cli>=1.15 (from sphinx-argparse-cli>=1.11) -* updated to myst-parser>=3.0 (from myst-parser>=2.0) -* updated to furo>=2024.4 (from furo>=2023.9.10) -* updated to numpy>=1.26,<2.0 (from numpy>=1.26) -* updated to matplotlib>=3.9 (from matplotlib>=3.8) -* updated to dictIO>=0.3.4 (from dictIO>=0.3.1) -* removed black - -### Changed -* replaced black formatter with ruff formatter -* Changed publishing workflow to use OpenID Connect (Trusted Publisher Management) when publishing to PyPI -* Updated copyright statement -* VS Code settings: Turned off automatic venv activation - - -## [0.2.13] - 2024-02-21 - -### Added -* README.md : Under `Development Setup`, added a step to install current package in "editable" mode, using the pip install -e option. -This removes the need to manually add /src to the PythonPath environment variable in order for debugging and tests to work. - -### Removed -* VS Code settings: Removed the setting which added the /src folder to PythonPath. This is no longer necessary. Installing the project itself as a package in "editable" mode, using the pip install -e option, solves the issue and removes the need to manually add /src to the PythonPath environment variable. - -### Changed -* Moved all project configuration from setup.cfg to pyproject.toml -* Moved all tox configuration from setup.cfg to tox.ini. 
-* Moved pytest configuration from pyproject.toml to pytest.ini -* Deleted setup.cfg - -### Dependencies -* updated to black[jupyter]==24.1 (from black[jupyter]==23.12) -* updated to version: '==24.1' (from version: '==23.12') -* updated to ruff==0.2.1 (from ruff==0.1.8) -* updated to pyright==1.1.350 (from pyright==1.1.338) -* updated to sourcery==1.15 (from sourcery==1.14) -* updated to lxml>=5.1 (from lxml>=4.9) -* updated to pandas>=2.2 (from pandas>=2.1) - - -## [0.2.12] - 2024-01-09 - -Maintenance Release - -### Dependencies - -* Updated to dictIO>=0.3.1 (from dictIO>=0.2.9) -* Updated other dependencies to latest versions - - -## [0.2.11] - 2023-09-25 - -### Dependencies - -* Updated dependencies to latest versions - - -## [0.2.10] - 2023-06-22 - -### Changed - -* Modularized GitHub workflows -* Changed default Python version in GitHub workflows from 3.10 to 3.11 - -### Dependencies - -* updated to dictIO>=0.2.8 -* requirements-dev.txt: Updated dependencies to latest versions - - -## [0.2.9] - 2023-05-04 - -### Changed - -* dependencies: updated dependencies to latest versions - - -## [0.2.8] - 2023-01-11 - -### Changed - -* Added missing DocStrings for public classes, methods and functions -* Changed links to package documentation to open README.html, not the default index page -* data classes: changed initialisation of mutable types to use default_factory -* ruff: added rule-set "B" (flake8-bugbear) - -### Dependencies - -* updated to dictIO>=0.2.6 - - -## [0.2.7] - 2023-01-04 - -### Changed - -* Linter: Migrated from flake8 to ruff.
- (Added ruff; removed flake8 and isort) -* Adjusted GitHub CI workflow accordingly.
- (Added ruff job; removed flake8 and isort jobs) -* VS Code settings: Adjusted Pylance configuration - -### Added - -* Added a batch file 'qa.bat' in root folder to ease local execution of code quality checks - -### Dependencies - -* updated to dictIO>=0.2.5 - - -## [0.2.6] - 2022-12-12 - -### Changed - -* Moved dev-only dependencies from requirements.txt to requirements-dev.txt -* ospx/`__init__`.py and ospx/fmi/`__init__`.py : ensured that imported symbols get also exported
- (added "as" clause -> "from x import y as y" instead of only "from x import y") -* Configured code quality tools flake8, black, isort, pyright -* Improved code quality, resolving all warnings and errors flagged by the configured code quality tools - (flake8, black, isort, pyright, sourcery) - -### Added - -* Added GitHub workflow 'main.yml' for continuous integration (runs all CI tasks except Sphinx) - * format checks: black, isort - * lint check: flake8, flake8-bugbear - * type check: pyright - * test: uses tox to run pytest on {Windows, Linux, MacOS} with {py39, py310} - * publish: publishing to PyPI (runs only on push of new tag vx.x.x, and after all other jobs succeeded) - * merge_to_release_branch: merge tagged commit to release branch (runs after publish) - -### Dependencies - -* updated to dictIO>=0.2.4 - - -## [0.2.5] - 2022-12-01 - -### Changed - -* variable.py: get_fmi_data_type(): - * Removed the elif branch 'isinstance(arg, Sequence)'.
- It caused problems as it falsely returned the FMI type 'Enumeration' also for strings.
- The respective elif branch is for the time being commented out.
- However, a proper solution is needed as soon as xs:enumeration is used in an OSP case.
- The problem is registered as [Issue #5](https://github.com/dnv-opensource/ospx/issues/5) -* Code formatting: Changed from yapf to black -* STYLEGUIDE.md : Adjusted to match black formatting -* VS Code settings: Updated to use black as formatter -* requirements.txt: Updated dependencies to their most recent versions -* GitHub actions (yml files): Updated following actions to their most recent versions: - * checkout@v1 -> checkout@v3 - * setup-python@v2 -> setup-python@v4 - * cache@v2 -> cache@v3 - -### Added - -* watchCosim: Added commandline option --scale
- (allows to scale the generated images by a factor) -* Added sourcery configuration (.sourcery.yaml) -* Added py.typed file into the package root folder and included it setup.cfg as package_data - - -## [0.2.4] - 2022-11-08 - -### Changed - -* Renamed module systemStructure.py to system.py
- Accordingly, renamed also class SystemStructure to System. - -* Renamed some attributes in FMU class - -* dependencies: - * upgraded to dictIO >= 0.2.2 (now supporting references and expressions in JSON dicts) - -### Added - -* \tests: Added spring_mass_damper example - -* \tests: Added test_fmu.py - -### Solved - -* watchCosim.py : Added try-except statements to catch TypeErrors and ValueErrors when trying to plot non-numerical variables (i.e. String or None) - - - -## [0.2.3] - 2022-10-05 - -### Solved - -* Importer:
- * Corrected a bug in OspSystemStructureImporter, where multiple connections between two components would not be imported (but only the last one survived). Now, also more than one connection in between two components are imported correctly. - * OspSystemStructureImporter now resolves the type of initial values. I.e. If an initial value in OspSystemStructure is denoted as literal '1' but with Type 'Real', then this initial value will be imported not as integer 1 but as float 1.0 - - -## [0.2.2] - 2022-10-05 - -### Solved - -* Connection:
- Corrected a bug in Connection.is_variable_connection() and Connection.is_variable_group_connection() which led to Variable Connections not being resolved. - - -## [0.2.1] - 2022-10-01 - -### Changed - -* OspSimulationCase:
- Changed setup(): FMU files get no longer copied into the case folder by default but stay where they are (i.e. in the library).
- Only if an FMU is not reachable by a relative path from the case folder, the FMU will get copied into the case folder. - - -* dependencies: - * upgraded to dictIO >= 0.2.0 - - -## [0.2.0] - 2022-09-28 - -### Solved - -* importer.py:
- Relative paths to libSource and FMUs are now properly resolved, relative to the target directory the OSPSystemStructure.xml is imported into (= folder in which the caseDict is created). - For libSource, by default the absolute path will be entered. This makes the caseDict insensitive when moved or copied into other (case) folders. - -### Changed - -* OSPModelDescription.xml:
- The handling of OSPModelDescription.xml files has changed: - * no OSPModelDescription.xml files get written by default - * existing OSPModelDescription.xml files will be kept - -* dependencies: - * upgraded to dictIO >= 0.1.2 - -### Added - -* OSPSystemStructure.xml: - * Added support for VariableGroups and VariableGroupConnections (as defined in OSP-IS).
- importSystemStructure is now also able to import OSPSystemStructure.xml files that use Connections of OSP-IS type 'VariableGroupConnection'. - - * Added support for stepSize attribute:
- If a \ element in OSPSystemStructure.xml explicitely defines the stepSize attribute, and if the value given for a \'s stepSize inside OSPSystemStructure.xml differs from the default stepSize defined in the FMU's ModelDescription.xml, then the stepSize defined in OSPSystemStructure.xml prevails and will also explicitely be included in the OSPSystemStructure.xml file written by ospCaseBuilder. - - - -## [0.1.2] - 2022-08-19 - -### Changed - -* variable.py: - * variable.start -> added type casting to setter property ensuring an already defined data_type of the variable is not altered when a new start value is set. - -* watchCosim.py - * put watchCosim in working state after time stepping, before changing over to individual data frames - * move *.csv files finally into folder /results - -* Protect png's in result folder from being deleted - -* ospCaseBuilder CLI: - * inspect mode (--inspect) now adds to the results the attributes of the DefaultExperiment element from the FMU's modelDescription.xml - -* plotting.py: - * added further exceptions for non-word characters in title strings - -* dependencies: - * ospx now uses dictIO v0.1.1 - -## [0.1.1] - 2022-05-30 - -### Changed - -* case dict file format: Removed 'root' element from '_environment' section, as it is obsolete. - -### Fixed - -* relative paths in the 'fmu' element led to a FileNotFound error. This is fixed now. - -## [0.1.0] - 2022-05-28 - -### Changed - -* Major refactoring, introducing classes for the main elements such as FMU, Component, SystemStructure etc. -* Simplified imports from namespace ospx. Example: - * Old (<= v0.0.22): - ~~~py - from ospx.ospCaseBuilder import OspCaseBuilder - ~~~ - * New: - ~~~py - from ospx import OspCaseBuilder - ~~~ -* Use new simplified imports from namespace dictIO (using updated version of dictIO package) -* Two changes were introduced in the case dict file format: - 1. Connector element: key 'reference' changed to 'variable':
- * Old (<= v0.0.22): - ~~~cpp - connectors - { - difference_input_minuend - { - reference difference.IN1; - type input; - } - ~~~ - * New: - ~~~cpp - connectors - { - difference_input_minuend - { - variable difference.IN1; - type input; - } - ~~~ - 2. Connection element: source and target changed from single strings to fully qualified endpoints, providing not only the connector but also the component the connector or variable belongs to: - * Old (<= v0.0.22): - ~~~cpp - connections - { - minuend_to_difference - { - source minuend_output; - target difference_input_minuend; - } - ~~~ - * New: - ~~~cpp - connections - { - minuend_to_difference - { - source - { - component minuend; - connector minuend_output; - } - target - { - component difference; - connector difference_input_minuend; - } - } - ~~~ - * Instead of connector, alternatively also a variable can be referenced in source / target endpoint. Example: - ~~~cpp - connections - { - minuend_to_difference - { - source - { - component minuend; - variable constVal.OUT; - } - target - { - component difference; - variable difference.IN1; - } - } - ~~~ - - -## [0.0.22] - 2022-05-09 - -* First public release - -## [0.0.17] - 2022-02-14 - -### Added - -* Added support for Python 3.10 - - -[unreleased]: https://github.com/dnv-opensource/ospx/compare/v0.2.14...HEAD -[0.2.14]: https://github.com/dnv-opensource/ospx/compare/v0.2.13...v0.2.14 -[0.2.13]: https://github.com/dnv-opensource/ospx/compare/v0.2.12...v0.2.13 -[0.2.12]: https://github.com/dnv-opensource/ospx/compare/v0.2.11...v0.2.12 -[0.2.11]: https://github.com/dnv-opensource/ospx/compare/v0.2.10...v0.2.11 -[0.2.10]: https://github.com/dnv-opensource/ospx/compare/v0.2.9...v0.2.10 -[0.2.9]: https://github.com/dnv-opensource/ospx/compare/v0.2.8...v0.2.9 -[0.2.8]: https://github.com/dnv-opensource/ospx/compare/v0.2.7...v0.2.8 -[0.2.7]: https://github.com/dnv-opensource/ospx/compare/v0.2.6...v0.2.7 -[0.2.6]: 
https://github.com/dnv-opensource/ospx/compare/v0.2.5...v0.2.6 -[0.2.5]: https://github.com/dnv-opensource/ospx/compare/v0.2.4...v0.2.5 -[0.2.4]: https://github.com/dnv-opensource/ospx/compare/v0.2.3...v0.2.4 -[0.2.3]: https://github.com/dnv-opensource/ospx/compare/v0.2.2...v0.2.3 -[0.2.2]: https://github.com/dnv-opensource/ospx/compare/v0.2.1...v0.2.2 -[0.2.1]: https://github.com/dnv-opensource/ospx/compare/v0.2.0...v0.2.1 -[0.2.0]: https://github.com/dnv-opensource/ospx/compare/v0.1.1...v0.2.0 -[0.1.2]: https://github.com/dnv-opensource/ospx/compare/v0.1.1...v0.1.2 -[0.1.1]: https://github.com/dnv-opensource/ospx/compare/v0.1.0...v0.1.1 -[0.1.0]: https://github.com/dnv-opensource/ospx/compare/v0.0.22...v0.1.0 -[0.0.22]: https://github.com/dnv-opensource/ospx/compare/v0.0.17...v0.0.22 -[0.0.17]: https://github.com/dnv-opensource/ospx/releases/tag/v0.0.17 -[ospx]: https://github.com/dnv-opensource/ospx +# Changelog + +All notable changes to the [ospx] project will be documented in this file.
+The changelog format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [Unreleased] + +### Changed +* Changed from `pip`/`tox` to `uv` as package manager +* README.md : Completely rewrote section "Development Setup", introducing `uv` as package manager. +* Changed publishing workflow to use OpenID Connect (Trusted Publisher Management) when publishing to PyPI +* Updated copyright statement +* VS Code settings: Turned off automatic venv activation +* Replaced black formatter with ruff formatter + +### Added +* Added `mypy` as static type checker (in addition to `pyright`) + +### GitHub workflows +* (all workflows): Adapted to use `uv` as package manager +* _test_future.yml : updated Python version to 3.13.0-alpha - 3.13.0 +* _test_future.yml : updated name of test job to 'test313' + +### Dependencies +* Updated to ruff>=0.6.3 (from ruff==0.4.2) +* Updated to pyright>=1.1.378 (from pyright==1.1.360) +* Updated to sourcery>=1.22 (from sourcery==1.16) +* Updated to pytest>=8.3 (from pytest>=8.2) +* Updated to Sphinx>=8.0 (from Sphinx>=7.3) +* Updated to sphinx-argparse-cli>=1.17 (from sphinx-argparse-cli>=1.16) +* Updated to myst-parser>=4.0 (from myst-parser>=3.0) +* Updated to furo>=2024.8 (from furo>=2024.5) +* updated to setup-python@v5 (from setup-python@v4) +* updated to actions-gh-pages@v4 (from actions-gh-pages@v3) +* updated to upload-artifact@v4 (from upload-artifact@v3) +* Updated to download-artifact@v4 (from download-artifact@v3) +* updated to checkout@v4 (from checkout@v3) + + +## [0.2.14] - 2024-05-22 + +### Dependencies +* updated to ruff==0.4.2 (from ruff==0.2.1) +* updated to pyright==1.1.360 (from pyright==1.1.350) +* updated to sourcery==1.16 (from sourcery==1.15) +* updated to lxml>=5.2 (from lxml>=5.1) +* updated to types-lxml>=2024.4 (from types-lxml>=5.1) +* updated to pytest>=8.2 (from pytest>=7.4) +* updated to pytest-cov>=5.0 (from pytest-cov>=4.1) +* updated to Sphinx>=7.3 (from Sphinx>=7.2) +* updated to 
sphinx-argparse-cli>=1.15 (from sphinx-argparse-cli>=1.11) +* updated to myst-parser>=3.0 (from myst-parser>=2.0) +* updated to furo>=2024.4 (from furo>=2023.9.10) +* updated to numpy>=1.26,<2.0 (from numpy>=1.26) +* updated to matplotlib>=3.9 (from matplotlib>=3.8) +* updated to dictIO>=0.3.4 (from dictIO>=0.3.1) +* removed black + +### Changed +* replaced black formatter with ruff formatter +* Changed publishing workflow to use OpenID Connect (Trusted Publisher Management) when publishing to PyPI +* Updated copyright statement +* VS Code settings: Turned off automatic venv activation + + +## [0.2.13] - 2024-02-21 + +### Added +* README.md : Under `Development Setup`, added a step to install current package in "editable" mode, using the pip install -e option. +This removes the need to manually add /src to the PythonPath environment variable in order for debugging and tests to work. + +### Removed +* VS Code settings: Removed the setting which added the /src folder to PythonPath. This is no longer necessary. Installing the project itself as a package in "editable" mode, using the pip install -e option, solves the issue and removes the need to manually add /src to the PythonPath environment variable. + +### Changed +* Moved all project configuration from setup.cfg to pyproject.toml +* Moved all tox configuration from setup.cfg to tox.ini. 
+* Moved pytest configuration from pyproject.toml to pytest.ini +* Deleted setup.cfg + +### Dependencies +* updated to black[jupyter]==24.1 (from black[jupyter]==23.12) +* updated to version: '==24.1' (from version: '==23.12') +* updated to ruff==0.2.1 (from ruff==0.1.8) +* updated to pyright==1.1.350 (from pyright==1.1.338) +* updated to sourcery==1.15 (from sourcery==1.14) +* updated to lxml>=5.1 (from lxml>=4.9) +* updated to pandas>=2.2 (from pandas>=2.1) + + +## [0.2.12] - 2024-01-09 + +Maintenance Release + +### Dependencies + +* Updated to dictIO>=0.3.1 (from dictIO>=0.2.9) +* Updated other dependencies to latest versions + + +## [0.2.11] - 2023-09-25 + +### Dependencies + +* Updated dependencies to latest versions + + +## [0.2.10] - 2023-06-22 + +### Changed + +* Modularized GitHub workflows +* Changed default Python version in GitHub workflows from 3.10 to 3.11 + +### Dependencies + +* updated to dictIO>=0.2.8 +* requirements-dev.txt: Updated dependencies to latest versions + + +## [0.2.9] - 2023-05-04 + +### Changed + +* dependencies: updated dependencies to latest versions + + +## [0.2.8] - 2023-01-11 + +### Changed + +* Added missing DocStrings for public classes, methods and functions +* Changed links to package documentation to open README.html, not the default index page +* data classes: changed initialisation of mutable types to use default_factory +* ruff: added rule-set "B" (flake8-bugbear) + +### Dependencies + +* updated to dictIO>=0.2.6 + + +## [0.2.7] - 2023-01-04 + +### Changed + +* Linter: Migrated from flake8 to ruff.
+ (Added ruff; removed flake8 and isort) +* Adjusted GitHub CI workflow accordingly.
+ (Added ruff job; removed flake8 and isort jobs) +* VS Code settings: Adjusted Pylance configuration + +### Added + +* Added a batch file 'qa.bat' in root folder to ease local execution of code quality checks + +### Dependencies + +* updated to dictIO>=0.2.5 + + +## [0.2.6] - 2022-12-12 + +### Changed + +* Moved dev-only dependencies from requirements.txt to requirements-dev.txt +* ospx/`__init__`.py and ospx/fmi/`__init__`.py : ensured that imported symbols get also exported
+ (added "as" clause -> "from x import y as y" instead of only "from x import y") +* Configured code quality tools flake8, black, isort, pyright +* Improved code quality, resolving all warnings and errors flagged by the configured code quality tools + (flake8, black, isort, pyright, sourcery) + +### Added + +* Added GitHub workflow 'main.yml' for continuous integration (runs all CI tasks except Sphinx) + * format checks: black, isort + * lint check: flake8, flake8-bugbear + * type check: pyright + * test: uses tox to run pytest on {Windows, Linux, MacOS} with {py39, py310} + * publish: publishing to PyPI (runs only on push of new tag vx.x.x, and after all other jobs succeeded) + * merge_to_release_branch: merge tagged commit to release branch (runs after publish) + +### Dependencies + +* updated to dictIO>=0.2.4 + + +## [0.2.5] - 2022-12-01 + +### Changed + +* variable.py: get_fmi_data_type(): + * Removed the elif branch 'isinstance(arg, Sequence)'.
+ It caused problems as it falsely returned the FMI type 'Enumeration' also for strings.
+ The respective elif branch is for the time being commented out.
+ However, a proper solution is needed as soon as xs:enumeration is used in an OSP case.
+ The problem is registered as [Issue #5](https://github.com/dnv-opensource/ospx/issues/5) +* Code formatting: Changed from yapf to black +* STYLEGUIDE.md : Adjusted to match black formatting +* VS Code settings: Updated to use black as formatter +* requirements.txt: Updated dependencies to their most recent versions +* GitHub actions (yml files): Updated following actions to their most recent versions: + * checkout@v1 -> checkout@v3 + * setup-python@v2 -> setup-python@v4 + * cache@v2 -> cache@v3 + +### Added + +* watchCosim: Added commandline option --scale
+ (allows scaling the generated images by a factor) +* Added sourcery configuration (.sourcery.yaml) +* Added py.typed file into the package root folder and included it in setup.cfg as package_data + + +## [0.2.4] - 2022-11-08 + +### Changed + +* Renamed module systemStructure.py to system.py
+ Accordingly, renamed also class SystemStructure to System. + +* Renamed some attributes in FMU class + +* dependencies: + * upgraded to dictIO >= 0.2.2 (now supporting references and expressions in JSON dicts) + +### Added + +* \tests: Added spring_mass_damper example + +* \tests: Added test_fmu.py + +### Solved + +* watchCosim.py : Added try-except statements to catch TypeErrors and ValueErrors when trying to plot non-numerical variables (i.e. String or None) + + + +## [0.2.3] - 2022-10-05 + +### Solved + +* Importer:
+ * Corrected a bug in OspSystemStructureImporter, where multiple connections between two components would not be imported (but only the last one survived). Now, more than one connection between two components is imported correctly. + * OspSystemStructureImporter now resolves the type of initial values. I.e., if an initial value in OspSystemStructure is denoted as literal '1' but with Type 'Real', then this initial value will be imported not as integer 1 but as float 1.0 + + +## [0.2.2] - 2022-10-05 + +### Solved + +* Connection:
+ Corrected a bug in Connection.is_variable_connection() and Connection.is_variable_group_connection() which led to Variable Connections not being resolved. + + +## [0.2.1] - 2022-10-01 + +### Changed + +* OspSimulationCase:
+ Changed setup(): FMU files are no longer copied into the case folder by default but stay where they are (i.e. in the library).
+ Only if an FMU is not reachable by a relative path from the case folder, the FMU will be copied into the case folder. + + +* dependencies: + * upgraded to dictIO >= 0.2.0 + + +## [0.2.0] - 2022-09-28 + +### Solved + +* importer.py:
+ Relative paths to libSource and FMUs are now properly resolved, relative to the target directory the OSPSystemStructure.xml is imported into (= folder in which the caseDict is created). + For libSource, by default the absolute path will be entered. This makes the caseDict insensitive when moved or copied into other (case) folders. + +### Changed + +* OSPModelDescription.xml:
+ The handling of OSPModelDescription.xml files has changed: + * no OSPModelDescription.xml files get written by default + * existing OSPModelDescription.xml files will be kept + +* dependencies: + * upgraded to dictIO >= 0.1.2 + +### Added + +* OSPSystemStructure.xml: + * Added support for VariableGroups and VariableGroupConnections (as defined in OSP-IS).
+ importSystemStructure is now also able to import OSPSystemStructure.xml files that use Connections of OSP-IS type 'VariableGroupConnection'. + + * Added support for stepSize attribute:
+ If a \ element in OSPSystemStructure.xml explicitly defines the stepSize attribute, and if the value given for a \'s stepSize inside OSPSystemStructure.xml differs from the default stepSize defined in the FMU's ModelDescription.xml, then the stepSize defined in OSPSystemStructure.xml prevails and will also explicitly be included in the OSPSystemStructure.xml file written by ospCaseBuilder. + + + +## [0.1.2] - 2022-08-19 + +### Changed + +* variable.py: + * variable.start -> added type casting to setter property ensuring an already defined data_type of the variable is not altered when a new start value is set. + +* watchCosim.py + * put watchCosim in working state after time stepping, before changing over to individual data frames + * move *.csv files finally into folder /results + +* Protect png's in result folder from being deleted + +* ospCaseBuilder CLI: + * inspect mode (--inspect) now adds to the results the attributes of the DefaultExperiment element from the FMU's modelDescription.xml + +* plotting.py: + * added further exceptions for non-word characters in title strings + +* dependencies: + * ospx now uses dictIO v0.1.1 + +## [0.1.1] - 2022-05-30 + +### Changed + +* case dict file format: Removed 'root' element from '_environment' section, as it is obsolete. + +### Fixed + +* relative paths in the 'fmu' element led to a FileNotFound error. This is fixed now. + +## [0.1.0] - 2022-05-28 + +### Changed + +* Major refactoring, introducing classes for the main elements such as FMU, Component, SystemStructure etc. +* Simplified imports from namespace ospx. Example: + * Old (<= v0.0.22): + ~~~py + from ospx.ospCaseBuilder import OspCaseBuilder + ~~~ + * New: + ~~~py + from ospx import OspCaseBuilder + ~~~ +* Use new simplified imports from namespace dictIO (using updated version of dictIO package) +* Two changes were introduced in the case dict file format: + 1. Connector element: key 'reference' changed to 'variable':
+ * Old (<= v0.0.22): + ~~~cpp + connectors + { + difference_input_minuend + { + reference difference.IN1; + type input; + } + ~~~ + * New: + ~~~cpp + connectors + { + difference_input_minuend + { + variable difference.IN1; + type input; + } + ~~~ + 2. Connection element: source and target changed from single strings to fully qualified endpoints, providing not only the connector but also the component the connector or variable belongs to: + * Old (<= v0.0.22): + ~~~cpp + connections + { + minuend_to_difference + { + source minuend_output; + target difference_input_minuend; + } + ~~~ + * New: + ~~~cpp + connections + { + minuend_to_difference + { + source + { + component minuend; + connector minuend_output; + } + target + { + component difference; + connector difference_input_minuend; + } + } + ~~~ + * Instead of connector, alternatively also a variable can be referenced in source / target endpoint. Example: + ~~~cpp + connections + { + minuend_to_difference + { + source + { + component minuend; + variable constVal.OUT; + } + target + { + component difference; + variable difference.IN1; + } + } + ~~~ + + +## [0.0.22] - 2022-05-09 + +* First public release + +## [0.0.17] - 2022-02-14 + +### Added + +* Added support for Python 3.10 + + +[unreleased]: https://github.com/dnv-opensource/ospx/compare/v0.2.14...HEAD +[0.2.14]: https://github.com/dnv-opensource/ospx/compare/v0.2.13...v0.2.14 +[0.2.13]: https://github.com/dnv-opensource/ospx/compare/v0.2.12...v0.2.13 +[0.2.12]: https://github.com/dnv-opensource/ospx/compare/v0.2.11...v0.2.12 +[0.2.11]: https://github.com/dnv-opensource/ospx/compare/v0.2.10...v0.2.11 +[0.2.10]: https://github.com/dnv-opensource/ospx/compare/v0.2.9...v0.2.10 +[0.2.9]: https://github.com/dnv-opensource/ospx/compare/v0.2.8...v0.2.9 +[0.2.8]: https://github.com/dnv-opensource/ospx/compare/v0.2.7...v0.2.8 +[0.2.7]: https://github.com/dnv-opensource/ospx/compare/v0.2.6...v0.2.7 +[0.2.6]: 
https://github.com/dnv-opensource/ospx/compare/v0.2.5...v0.2.6 +[0.2.5]: https://github.com/dnv-opensource/ospx/compare/v0.2.4...v0.2.5 +[0.2.4]: https://github.com/dnv-opensource/ospx/compare/v0.2.3...v0.2.4 +[0.2.3]: https://github.com/dnv-opensource/ospx/compare/v0.2.2...v0.2.3 +[0.2.2]: https://github.com/dnv-opensource/ospx/compare/v0.2.1...v0.2.2 +[0.2.1]: https://github.com/dnv-opensource/ospx/compare/v0.2.0...v0.2.1 +[0.2.0]: https://github.com/dnv-opensource/ospx/compare/v0.1.1...v0.2.0 +[0.1.2]: https://github.com/dnv-opensource/ospx/compare/v0.1.1...v0.1.2 +[0.1.1]: https://github.com/dnv-opensource/ospx/compare/v0.1.0...v0.1.1 +[0.1.0]: https://github.com/dnv-opensource/ospx/compare/v0.0.22...v0.1.0 +[0.0.22]: https://github.com/dnv-opensource/ospx/compare/v0.0.17...v0.0.22 +[0.0.17]: https://github.com/dnv-opensource/ospx/releases/tag/v0.0.17 +[ospx]: https://github.com/dnv-opensource/ospx diff --git a/README.md b/README.md index 39c6fbb4..77b310ab 100644 --- a/README.md +++ b/README.md @@ -1,132 +1,180 @@ -# ospx -ospx is an extension package to [farn][farn_docs], adding support to build [OSP][osp_docs] (co-)simulation cases using functional mockup units (FMUs). - -ospx supports -* building of case-specific [OSP][osp_docs] (co-)simulation configuration files -* watching the progress of cosim, and saving final simulation results as a pandas dataframe. - -## Installation - -```sh -pip install ospx -``` -ospx requires the following (sub-)package: -* [dictIO][dictIO_docs]: foundation package, enabling ospx to handle configuration files in dictIO dict file format. - -However, dictIO gets installed automatically with ospx. - -## Usage Example - -ospx provides both an API for use inside Python as well as a CLI for shell execution of core functions. 
- -Reading a caseDict file and building the case-specific OSP (co-)simulation configuration files: -```py -from ospx import OspCaseBuilder - -OspCaseBuilder.build('caseDict') -``` - -The above task can also be invoked from the command line, using the 'ospCaseBuilder' command line script installed with ospx: -```sh -ospCaseBuilder caseDict -``` - -_For more examples and usage, please refer to [ospx's documentation][ospx_docs]._ - -## File Format -A caseDict is a file in dictIO dict file format used with farn. - -_For a documentation of the caseDict file format, see [File Format](fileFormat.md) in [ospx's documentation][ospx_docs] on GitHub Pages._ - -_For a detailed documentation of the dictIO dict file format used by farn, see [dictIO's documentation][dictIO_docs] on GitHub Pages._ - -## Development Setup - -1. Install Python 3.9 or higher, i.e. [Python 3.10](https://www.python.org/downloads/release/python-3104/) or [Python 3.11](https://www.python.org/downloads/release/python-3114/) - -2. Update pip and setuptools: - - ```sh - python -m pip install --upgrade pip setuptools - ``` - -3. git clone the dictIO repository into your local development directory: - - ```sh - git clone https://github.com/dnv-opensource/ospx path/to/your/dev/ospx - ``` - -4. In the ospx root folder: - - Create a Python virtual environment: - - ```sh - python -m venv .venv - ``` - - Activate the virtual environment: - - ..on Windows: - - ```sh - > .venv\Scripts\activate.bat - ``` - - ..on Linux: - - ```sh - source .venv/bin/activate - ``` - - Update pip and setuptools: - - ```sh - (.venv) $ python -m pip install --upgrade pip setuptools - ``` - - Install ospx's dependencies: - ```sh - (.venv) $ pip install -r requirements-dev.txt - ``` - This should return without errors. - - Finally, install ospx itself, yet not as a regular package but as an _editable_ package instead, using the pip install option -e: - ```sh - (.venv) $ pip install -e . - ``` - -5. 
Test that the installation works (in the ospx root folder): - - ```sh - (.venv) $ pytest . - ``` - -## Meta - -Copyright (c) 2024 [DNV](https://www.dnv.com) SE. All rights reserved. - -Frank Lumpitzsch – [@LinkedIn](https://www.linkedin.com/in/frank-lumpitzsch-23013196/) – frank.lumpitzsch@dnv.com - -Claas Rostock – [@LinkedIn](https://www.linkedin.com/in/claasrostock/?locale=en_US) – claas.rostock@dnv.com - -Seunghyeon Yoo – [@LinkedIn](https://www.linkedin.com/in/seunghyeon-yoo-3625173b/) – seunghyeon.yoo@dnv.com - -Distributed under the MIT license. See [LICENSE](LICENSE.md) for more information. - -[https://github.com/dnv-opensource/ospx](https://github.com/dnv-opensource/ospx) - -## Contributing - -1. Fork it () -2. Create your branch (`git checkout -b my-branch-name`) -3. Commit your changes (`git commit -am 'place a descriptive commit message here'`) -4. Push to the branch (`git push origin my-branch-name`) -5. Create a new Pull Request in GitHub - -For your contribution, please make sure you follow the [STYLEGUIDE](STYLEGUIDE.md) before creating the Pull Request. 
- - -[dictIO_docs]: https://dnv-opensource.github.io/dictIO/README.html -[ospx_docs]: https://dnv-opensource.github.io/ospx/README.html -[farn_docs]: https://dnv-opensource.github.io/farn/README.html -[osp_docs]: https://open-simulation-platform.github.io/ +[![pypi](https://img.shields.io/pypi/v/ospx.svg?color=blue)](https://pypi.python.org/pypi/ospx) +[![versions](https://img.shields.io/pypi/pyversions/ospx.svg?color=blue)](https://pypi.python.org/pypi/ospx) +[![license](https://img.shields.io/pypi/l/ospx.svg)](https://github.com/dnv-opensource/ospx/blob/main/LICENSE) +![ci](https://img.shields.io/github/actions/workflow/status/dnv-opensource/ospx/.github%2Fworkflows%2Fnightly_build.yml?label=ci) +[![docs](https://img.shields.io/github/actions/workflow/status/dnv-opensource/ospx/.github%2Fworkflows%2Fpush_to_release.yml?label=docs)][ospx_docs] + +# ospx +ospx is an extension package to [farn][farn_docs], adding support to build [OSP][osp_docs] (co-)simulation cases using functional mockup units (FMUs). + +ospx supports +* building of case-specific [OSP][osp_docs] (co-)simulation configuration files +* watching the progress of cosim, and saving final simulation results as a pandas dataframe. + +## Installation + +```sh +pip install ospx +``` +ospx requires the following (sub-)package: +* [dictIO][dictIO_docs]: foundation package, enabling ospx to handle configuration files in dictIO dict file format. + +However, dictIO gets installed automatically with ospx. + +## Usage Example + +ospx provides both an API for use inside Python as well as a CLI for shell execution of core functions. 
+ +Reading a caseDict file and building the case-specific OSP (co-)simulation configuration files: +```py +from ospx import OspCaseBuilder + +OspCaseBuilder.build('caseDict') +``` + +The above task can also be invoked from the command line, using the 'ospCaseBuilder' command line script installed with ospx: +```sh +ospCaseBuilder caseDict +``` + +_For more examples and usage, please refer to [ospx's documentation][ospx_docs]._ + +## File Format +A caseDict is a file in dictIO dict file format used with farn. + +_For documentation of the caseDict file format, see [File Format](fileFormat.md) in [ospx's documentation][ospx_docs] on GitHub Pages._ + +_For detailed documentation of the dictIO dict file format used by farn, see [dictIO's documentation][dictIO_docs] on GitHub Pages._ + +## Development Setup + +### 1. Install uv +This project uses `uv` as package manager. +If you haven't already, install [uv](https://docs.astral.sh/uv), preferably using its ["Standalone installer"](https://docs.astral.sh/uv/getting-started/installation/#__tabbed_1_2) method:
+..on Windows: +```sh +powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex" +``` +..on MacOS and Linux: +```sh +curl -LsSf https://astral.sh/uv/install.sh | sh +``` +(see [docs.astral.sh/uv](https://docs.astral.sh/uv/getting-started/installation/) for all / alternative installation methods.) + +Once installed, you can update `uv` to its latest version, anytime, by running: +```sh +uv self update +``` + +### 2. Install Python +This project requires Python 3.10 or later.
+If you don't already have a compatible version installed on your machine, probably the most convenient way to install Python is through `uv`: +```sh +uv python install +``` +This will install the latest stable version of Python into the uv Python directory, i.e. as a uv-managed version of Python. + +Alternatively, and if you want a standalone version of Python on your machine, you can install Python either via `winget`: +```sh +winget install --id Python.Python +``` +or you can download and install Python from the [python.org](https://www.python.org/downloads/) website. + +### 3. Clone the repository +Clone the ospx repository into your local development directory: +```sh +git clone https://github.com/dnv-opensource/ospx path/to/your/dev/ospx +``` + +### 4. Install dependencies +Run `uv sync` to create a virtual environment and install all project dependencies into it: +```sh +uv sync +``` + +### 5. (Optional) Install CUDA support +Run `uv sync` with option `--extra cuda` to additionally install torch with CUDA support: +```sh +uv sync --extra cuda +``` + +Alternatively, you can manually install torch with CUDA support. +_Note 1_: Do this preferably _after_ running `uv sync`. That way you ensure a virtual environment exists, which is a prerequisite before you install torch with CUDA support using the `uv pip install` command below. + +To manually install torch with CUDA support, generate a `uv pip install` command matching your local machine's operating system using the wizard on the official [PyTorch website](https://pytorch.org/get-started/locally/). +_Note_: As we use `uv` as package manager, remember to replace `pip` in the command generated by the wizard with `uv pip`.
+ +If you are on Windows, the resulting `uv pip install` command will most likely look something like this: +```sh +uv pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu124 +``` + +_Hint:_ If you are unsure which cuda version to indicate in above `uv pip install .. /cuXXX` command, you can use the shell command `nvidia-smi` on your local system to find out the cuda version supported by the current graphics driver installed on your system. When then generating the `uv pip install` command with the wizard from the [PyTorch website](https://pytorch.org/get-started/locally/), select the cuda version that matches the major version of what your graphics driver supports (major version must match, minor version may deviate). + + +### 6. (Optional) Activate the virtual environment +When using `uv`, there is in almost all cases no longer a need to manually activate the virtual environment.
+`uv` will find the `.venv` virtual environment in the working directory or any parent directory, and activate it on the fly whenever you run a command via `uv` inside your project folder structure: +```sh +uv run +``` + +However, you still _can_ manually activate the virtual environment if needed. +When developing in an IDE, for instance, this can in some cases be necessary depending on your IDE settings. +To manually activate the virtual environment, run one of the "known" legacy commands:
+..on Windows: +```sh +.venv\Scripts\activate.bat +``` +..on Linux: +```sh +source .venv/bin/activate +``` + +### 7. Install pre-commit hooks +The `.pre-commit-config.yaml` file in the project root directory contains a configuration for pre-commit hooks. +To install the pre-commit hooks defined therein in your local git repository, run: +```sh +uv run pre-commit install +``` + +All pre-commit hooks configured in `.pre-commit-config.yaml` will now run each time you commit changes. + + +### 8. Test that the installation works +To test that the installation works, run pytest in the project root folder: +```sh +uv run pytest +``` + +## Meta + +Copyright (c) 2024 [DNV](https://www.dnv.com) SE. All rights reserved. + +Frank Lumpitzsch – [@LinkedIn](https://www.linkedin.com/in/frank-lumpitzsch-23013196/) – frank.lumpitzsch@dnv.com + +Claas Rostock – [@LinkedIn](https://www.linkedin.com/in/claasrostock/?locale=en_US) – claas.rostock@dnv.com + +Seunghyeon Yoo – [@LinkedIn](https://www.linkedin.com/in/seunghyeon-yoo-3625173b/) – seunghyeon.yoo@dnv.com + +Distributed under the MIT license. See [LICENSE](LICENSE.md) for more information. + +[https://github.com/dnv-opensource/ospx](https://github.com/dnv-opensource/ospx) + +## Contributing + +1. Fork it () +2. Create an issue in your GitHub repo +3. Create your branch based on the issue number and type (`git checkout -b issue-name`) +4. Evaluate and stage the changes you want to commit (`git add -i`) +5. Commit your changes (`git commit -am 'place a descriptive commit message here'`) +6. Push to the branch (`git push origin issue-name`) +7. Create a new Pull Request in GitHub + +For your contribution, please make sure you follow the [STYLEGUIDE](STYLEGUIDE.md) before creating the Pull Request. 
+ + +[dictIO_docs]: https://dnv-opensource.github.io/dictIO/README.html +[ospx_docs]: https://dnv-opensource.github.io/ospx/README.html +[farn_docs]: https://dnv-opensource.github.io/farn/README.html +[osp_docs]: https://open-simulation-platform.github.io/ diff --git a/STYLEGUIDE.md b/STYLEGUIDE.md index bc79f85f..d4ab3f89 100644 --- a/STYLEGUIDE.md +++ b/STYLEGUIDE.md @@ -1,424 +1,423 @@ - -# Style Guide - -All code shall be [black](https://pypi.org/project/black/) formatted. - -References, details as well as examples of bad/good styles and their respective reasoning can be found below. - -## References - -* [PEP-8](https://www.python.org/dev/peps/pep-0008/) (see also [pep8.org](https://pep8.org/)) -* [PEP-257](https://www.python.org/dev/peps/pep-0257/) -* Python style guide by [theluminousmen.com](https://luminousmen.com/post/the-ultimate-python-style-guidelines) -* [Documenting Python Code: A Complete Guide](https://realpython.com/documenting-python-code) -* [Jupyter](https://jupyter.readthedocs.io/en/latest/contributing/ipython-dev-guide/coding_style.html) style guide -* Python style guide on [learnpython.com](https://learnpython.com/blog/python-coding-best-practices-and-style-guidelines/) -* [flake8](https://flake8.pycqa.org/en/latest/) -* [black](https://pypi.org/project/black/) - -## Code Layout - -* Use 4 spaces instead of tabs -* Maximum line length is 88 characters (not 79 as proposed in [PEP-8](https://www.python.org/dev/peps/pep-0008/)) -* 2 blank lines between classes and functions -* 1 blank line within class, between class methods -* Use blank lines for logic separation of functionality within functions/methods wherever it is justified -* No whitespace adjacent to parentheses, brackets, or braces - -```py - # Bad - spam( items[ 1 ], { key1 : arg1, key2 : arg2 }, ) - - # Good - spam(items[1], {key1: arg1, key2: arg2}, []) -``` - -* Surround operators with single whitespace on either side. 
- -```py - # Bad - x<1 - - # Good - x == 1 -``` - -* Never end your lines with a semicolon, and do not use a semicolon to put two statements on the same line -* When branching, always start a new block on a new line - -```py - # Bad - if flag: return None - - # Good - if flag: - return None -``` - -* Similarly to branching, do not write methods on one line in any case: - -```py - # Bad - def do_something(self): print("Something") - - # Good - def do_something(self): - print("Something") -``` - -* Place a class's `__init__` function (the constructor) always at the beginning of the class - -## Line Breaks - -* If function arguments do not fit into the specified line length, move them to a new line with indentation - -```py - # Bad - def long_function_name(var_one, var_two, var_three, - var_four): - print(var_one) - - # Bad - def long_function_name(var_one, var_two, var_three, - var_four): - print(var_one) - - # Better (but not preferred) - def long_function_name(var_one, - var_two, - var_three, - var_four): - print(var_one) - - # Good (and preferred) - def long_function_name( - var_one, - var_two, - var_three, - var_four, - ): - print(var_one) -``` - -* Move concatenated logical conditions to new lines if the line does not fit the maximum line size. This will help you understand the condition by looking from top to bottom. Poor formatting makes it difficult to read and understand complex predicates. 
- -```py - # Good - if ( - this_is_one_thing - and that_is_another_thing - or that_is_third_thing - or that_is_yet_another_thing - and one_more_thing - ): - do_something() -``` - -* Where binary operations stretch multiple lines, break lines before the binary operators, not thereafter - -```py - # Bad - GDP = ( - private_consumption + - gross_investment + - government_investment + - government_spending + - (exports - imports) - ) - - # Good - GDP = ( - private_consumption - + gross_investment - + government_investment - + government_spending - + (exports - imports) - ) -``` - -* Chaining methods should be broken up on multiple lines for better readability - -```py - ( - df.write.format("jdbc") - .option("url", "jdbc:postgresql:dbserver") - .option("dbtable", "schema.tablename") - .option("user", "username") - .option("password", "password") - .save() - ) -``` - -* Add a trailing comma to sequences of items when the closing container token ], ), or } does not appear on the same line as the final element - -```py - # Bad - y = [ - 0, - 1, - 4, - 6 - ] - z = { - 'a': 1, - 'b': 2 - } - - # Good - x = [1, 2, 3] - - # Good - y = [ - 0, - 1, - 4, - 6, <- note the trailing comma - ] - z = { - 'a': 1, - 'b': 2, <- note the trailing comma - } -``` - -## String Formatting - -* When quoting string literals, use double-quoted strings. When the string itself contains single or double quote characters, however, use the respective other one to avoid backslashes in the string. It improves readability. -* Use f-strings to format strings: - -```py - # Bad - print("Hello, %s. You are %s years old. You are a %s." % (name, age, profession)) - - # Good - print(f"Hello, {name}. You are {age} years old. You are a {profession}.") -``` - -* Use multiline strings, not \ , since it gets much more readable. - -```py - raise AttributeError( - "Here is a multiline error message with a very long first line " - "and a shorter second line." 
- ) -``` - -## Naming Conventions - -* For module names: `lowercase` . -Long module names can have words separated by underscores (`really_long_module_name.py`), but this is not required. Try to use the convention of nearby files. -* For class names: `CamelCase` -* For methods, functions, variables and attributes: `lowercase_with_underscores` -* For constants: `UPPERCASE` or `UPPERCASE_WITH_UNDERSCORES` -(Python does not differentiate between variables and constants. Using UPPERCASE for constants is just a convention, but helps a lot to quickly identify variables meant to serve as constants.) -* Implementation-specific private methods and variables will use `_single_underscore_prefix` -* Don't include the type of a variable in its name. - E.g. use `senders` instead of `sender_list` -* Names shall be clear about what a variable, class, or function contains or does. If you struggle to come up with a clear name, rethink your architecture: Often, the difficulty in finding a crisp name for something is a hint that separation of responsibilities can be improved. The solution then is less to agree on a name, but to start a round of refactoring: The name you're seeking often comes naturally then with refactoring to an improved architecture with clear responsibilities. -(see [SRP](https://en.wikipedia.org/wiki/Single-responsibility_principle), Single-Responsibilty Principle by Robert C. Martin) - -## Named Arguments - -* Use named arguments to improve readability and avoid mistakes introduced with future code maintenance - -```py - # Bad - urlget("[http://google.com](http://google.com/)", 20) - - # Good - urlget("[http://google.com](http://google.com/)", timeout=20) -``` - -* Never use mutable objects as default arguments in Python. If an attribute in a class or a named parameter in a function is of a mutable data type (e.g. 
a list or dict), never set its default value in the declaration of an object but always set it to None first, and then only later assign the default value in the class's constructor, or the functions body, respectively. Sounds complicated? If you prefer the shortcut, the examples below are your friend. -If your are interested in the long story including the why‘s, read these discussions on [Reddit](https://old.reddit.com/r/Python/comments/opb7hm/do_not_use_mutable_objects_as_default_arguments/) and [Twitter](https://twitter.com/willmcgugan/status/1419616480971399171). - -```py - # Bad - class Foo: - items = [] - - # Good - class Foo: - items = None - def __init__(self): - self.items = [] - - - # Bad - class Foo: - def __init__(self, items=[]): - self.items = items - - # Good - class Foo: - def __init__(self, items=None): - self.items = items or [] - - - # Bad - def some_function(x, y, items=[]): - ... - - # Good - def some_function(x, y, items=None): - items = items or [] - ... -``` - -## Commenting - -* First of all, if the code needs comments to clarify its work, you should think about refactoring it. The best comment to code is the code itself. -* Describe complex, possibly incomprehensible points and side effects in the comments -* Separate `#` and the comment with one whitespace - -```py - #bad comment - # good comment -``` - -* Use inline comments sparsely -* Where used, inline comments shall have 2 whitespaces before the `#` and one whitespace thereafter - -```py - x = y + z # inline comment - str1 = str2 + str3 # another inline comment -``` - -* If a piece of code is poorly understood, mark the piece with a `@TODO:` tag and your name to support future refactoring: - -```py - def get_ancestors_ids(self): - # @TODO: Do a cache reset while saving the category tree. 
CLAROS, YYYY-MM-DD - cache_name = f"{self._meta.model_name}_ancestors_{self.pk}" - cached_ids = cache.get(cache_name) - if cached_ids: - return cached_ids - - ids = [c.pk for c in self.get_ancestors(include_self=True)] - cache.set(cache_name, ids, timeout=3600) - - return ids -``` - -## Type hints - -* Use type hints in function signatures and module-scope variables. This is good documentation and can be used with linters for type checking and error checking. Use them whenever possible. -* Use pyi files to type annotate third-party or extension modules. - -## Docstrings - -* All Docstrings should be written in [Numpy](https://numpydoc.readthedocs.io/en/latest/format.html) format. For a good tutorial on Docstrings, see [Documenting Python Code: A Complete Guide](https://realpython.com/documenting-python-code) -* In a Docstring, summarize function/method behavior and document its arguments, return value(s), side effects, exceptions raised, and restrictions -* Wrap Docstrings with triple double quotes (""") -* The description of the arguments must be indented - -```py - def some_method(name, print=False): - """This function does something - - Parameters - ---------- - name : str - The name to use - print: bool, optional - A flag used to print the name to the console, by default False - - Raises - ------ - KeyError - If name is not found - - Returns - ------- - int - The return code - """ - ... - return 0 -``` - -## Exceptions - -* Raise specific exceptions and catch specific exceptions, such as KeyError, ValueError, etc. -* Do not raise or catch just Exception, except in rare cases where this is unavoidable, such as a try/except block on the top-level loop of some long-running process. For a good tutorial on why this matters, see [The Most Diabolical Python Antipattern](https://realpython.com/the-most-diabolical-python-antipattern/). -* Minimize the amount of code in a try/except block. 
The larger the body of the try, - the more likely that an exception will be raised by a line of code that you didn’t expect to raise an exception. - -## Imports - -* Avoid creating circular imports by importing modules more specialized than the one you are editing -* Relative imports are forbidden ([PEP-8](https://www.python.org/dev/peps/pep-0008/) only “highly discourages” them). Where absolutely needed, the `from future import absolute_import` syntax should be used (see [PEP-328](https://www.python.org/dev/peps/pep-0328/)) -* Never use wildcard imports (`from import *`). Always be explicit about what you're importing. Namespaces make code easier to read, so use them. -* Break long imports using parentheses and indent by 4 spaces. Include the trailing comma after the last import and place the closing bracket on a separate line - -```py - from my_pkg.utils import ( - some_utility_method_1, - some_utility_method_2, - some_utility_method_3, - some_utility_method_4, - some_utility_method_5, - ) -``` - -* Imports should be written in the following order, separated by a blank line: - 1. build-in modules - 2. third-party modules - 3. local application/library specific imports - -```py - import logging - import os - import typing as T - - import pandas as pd - import numpy as np - - import my_package - import my_package.my_module - from my_package.my_module import my_function, MyClass -``` - -* Even if a Python file is intended to be used as executable / script file only, it shall still be importable as a module, and its import should not have any side effects. Its main functionality shall hence be in a `main()` function, so that the code can be imported as a module for testing or being reused in the future: - -```py - def main(): - ... - - if __name__ == "__main__": - main() -``` - -## Unit-tests - -* Use pytest as the preferred testing framework. -* The name of a test shall clearly express what is being tested. 
-* Each test should preferably check only one specific aspect. - -```py - # Bad - def test_smth(): - result = f() - assert isinstance(result, list) - assert result[0] == 1 - assert result[1] == 2 - assert result[2] == 3 - assert result[3] == 4 - - # Good - def test_smth_type(): - result = f() - assert isinstance(result, list), "Result should be list" - - def test_smth_values(): - result = f() - assert set(result) == set(expected), f"Result should be {set(expected)}" -``` - -## And finally: It is a bad idea to use - -* global variables. -* iterators where they can be replaced by vectorized operations. -* lambda where it is not required. -* map and lambda where it can be replaced by a simple list comprehension. -* multiple nested maps and lambdas. -* nested functions. They are hard to test and debug. + +# Style Guide + +All code shall be [Ruff](https://docs.astral.sh/ruff) formatted. + +References, details as well as examples of bad/good styles and their respective reasoning can be found below. 
+ +## References + +* [PEP-8](https://www.python.org/dev/peps/pep-0008/) (see also [pep8.org](https://pep8.org/)) +* [PEP-257](https://www.python.org/dev/peps/pep-0257/) +* Python style guide by [theluminousmen.com](https://luminousmen.com/post/the-ultimate-python-style-guidelines) +* [Documenting Python Code: A Complete Guide](https://realpython.com/documenting-python-code) +* [Jupyter](https://jupyter.readthedocs.io/en/latest/contributing/ipython-dev-guide/coding_style.html) style guide +* Python style guide on [learnpython.com](https://learnpython.com/blog/python-coding-best-practices-and-style-guidelines/) +* [Ruff](https://docs.astral.sh/ruff) + +## Code Layout + +* Use 4 spaces instead of tabs +* Maximum line length is 120 characters (not 79 as proposed in [PEP-8](https://www.python.org/dev/peps/pep-0008/)) +* 2 blank lines between classes and functions +* 1 blank line within class, between class methods +* Use blank lines for logic separation of functionality within functions/methods wherever it is justified +* No whitespace adjacent to parentheses, brackets, or braces + +```py + # Bad + spam( items[ 1 ], { key1 : arg1, key2 : arg2 }, ) + + # Good + spam(items[1], {key1: arg1, key2: arg2}, []) +``` + +* Surround operators with single whitespace on either side. 
+ +```py + # Bad + x<1 + + # Good + x == 1 +``` + +* Never end your lines with a semicolon, and do not use a semicolon to put two statements on the same line +* When branching, always start a new block on a new line + +```py + # Bad + if flag: return None + + # Good + if flag: + return None +``` + +* Similarly to branching, do not write methods on one line in any case: + +```py + # Bad + def do_something(self): print("Something") + + # Good + def do_something(self): + print("Something") +``` + +* Place a class's `__init__` function (the constructor) always at the beginning of the class + +## Line Breaks + +* If function arguments do not fit into the specified line length, move them to a new line with indentation + +```py + # Bad + def long_function_name(var_one, var_two, var_three, + var_four): + print(var_one) + + # Bad + def long_function_name(var_one, var_two, var_three, + var_four): + print(var_one) + + # Better (but not preferred) + def long_function_name(var_one, + var_two, + var_three, + var_four): + print(var_one) + + # Good (and preferred) + def long_function_name( + var_one, + var_two, + var_three, + var_four, + ): + print(var_one) +``` + +* Move concatenated logical conditions to new lines if the line does not fit the maximum line size. This will help you understand the condition by looking from top to bottom. Poor formatting makes it difficult to read and understand complex predicates. 
+ +```py + # Good + if ( + this_is_one_thing + and that_is_another_thing + or that_is_third_thing + or that_is_yet_another_thing + and one_more_thing + ): + do_something() +``` + +* Where binary operations stretch multiple lines, break lines before the binary operators, not thereafter + +```py + # Bad + GDP = ( + private_consumption + + gross_investment + + government_investment + + government_spending + + (exports - imports) + ) + + # Good + GDP = ( + private_consumption + + gross_investment + + government_investment + + government_spending + + (exports - imports) + ) +``` + +* Chaining methods should be broken up on multiple lines for better readability + +```py + ( + df.write.format("jdbc") + .option("url", "jdbc:postgresql:dbserver") + .option("dbtable", "schema.tablename") + .option("user", "username") + .option("password", "password") + .save() + ) +``` + +* Add a trailing comma to sequences of items when the closing container token ], ), or } does not appear on the same line as the final element + +```py + # Bad + y = [ + 0, + 1, + 4, + 6 + ] + z = { + 'a': 1, + 'b': 2 + } + + # Good + x = [1, 2, 3] + + # Good + y = [ + 0, + 1, + 4, + 6, <- note the trailing comma + ] + z = { + 'a': 1, + 'b': 2, <- note the trailing comma + } +``` + +## String Formatting + +* When quoting string literals, use double-quoted strings. When the string itself contains single or double quote characters, however, use the respective other one to avoid backslashes in the string. It improves readability. +* Use f-strings to format strings: + +```py + # Bad + print("Hello, %s. You are %s years old. You are a %s." % (name, age, profession)) + + # Good + print(f"Hello, {name}. You are {age} years old. You are a {profession}.") +``` + +* Use multiline strings, not \ , since it gets much more readable. + +```py + raise AttributeError( + "Here is a multiline error message with a very long first line " + "and a shorter second line." 
+
+    )
+```
+
+## Naming Conventions
+
+* For module names: `lowercase` .
+Long module names can have words separated by underscores (`really_long_module_name.py`), but this is not required. Try to use the convention of nearby files.
+* For class names: `CamelCase`
+* For methods, functions, variables and attributes: `lowercase_with_underscores`
+* For constants: `UPPERCASE` or `UPPERCASE_WITH_UNDERSCORES`
+(Python does not differentiate between variables and constants. Using UPPERCASE for constants is just a convention, but helps a lot to quickly identify variables meant to serve as constants.)
+* Implementation-specific private methods and variables will use `_single_underscore_prefix`
+* Don't include the type of a variable in its name.
+  E.g. use `senders` instead of `sender_list`
+* Names shall be clear about what a variable, class, or function contains or does. If you struggle to come up with a clear name, rethink your architecture: Often, the difficulty in finding a crisp name for something is a hint that separation of responsibilities can be improved. The solution then is less to agree on a name, but to start a round of refactoring: The name you're seeking often comes naturally then with refactoring to an improved architecture with clear responsibilities.
+(see [SRP](https://en.wikipedia.org/wiki/Single-responsibility_principle), Single-Responsibility Principle by Robert C. Martin)
+
+## Named Arguments
+
+* Use named arguments to improve readability and avoid mistakes introduced with future code maintenance
+
+```py
+    # Bad
+    urlget("[http://google.com](http://google.com/)", 20)
+
+    # Good
+    urlget("[http://google.com](http://google.com/)", timeout=20)
+```
+
+* Never use mutable objects as default arguments in Python. If an attribute in a class or a named parameter in a function is of a mutable data type (e.g. 
a list or dict), never set its default value in the declaration of an object but always set it to None first, and then only later assign the default value in the class's constructor, or the functions body, respectively. Sounds complicated? If you prefer the shortcut, the examples below are your friend. +If you are interested in the long story including the why‘s, read these discussions on [Reddit](https://old.reddit.com/r/Python/comments/opb7hm/do_not_use_mutable_objects_as_default_arguments/) and [Twitter](https://twitter.com/willmcgugan/status/1419616480971399171). + +```py + # Bad + class Foo: + items = [] + + # Good + class Foo: + items = None + def __init__(self): + self.items = [] + + + # Bad + class Foo: + def __init__(self, items=[]): + self.items = items + + # Good + class Foo: + def __init__(self, items=None): + self.items = items or [] + + + # Bad + def some_function(x, y, items=[]): + ... + + # Good + def some_function(x, y, items=None): + items = items or [] + ... +``` + +## Commenting + +* First of all, if the code needs comments to clarify its work, you should think about refactoring it. The best comment to code is the code itself. +* Describe complex, possibly incomprehensible points and side effects in the comments +* Separate `#` and the comment with one whitespace + +```py + #bad comment + # good comment +``` + +* Use inline comments sparsely +* Where used, inline comments shall have 2 whitespaces before the `#` and one whitespace thereafter + +```py + x = y + z # inline comment + str1 = str2 + str3 # another inline comment +``` + +* If a piece of code is poorly understood, mark the piece with a `@TODO:` tag and your name to support future refactoring: + +```py + def get_ancestors_ids(self): + # @TODO: Do a cache reset while saving the category tree. 
CLAROS, YYYY-MM-DD + cache_name = f"{self._meta.model_name}_ancestors_{self.pk}" + cached_ids = cache.get(cache_name) + if cached_ids: + return cached_ids + + ids = [c.pk for c in self.get_ancestors(include_self=True)] + cache.set(cache_name, ids, timeout=3600) + + return ids +``` + +## Type hints + +* Use type hints in function signatures and module-scope variables. This is good documentation and can be used with linters for type checking and error checking. Use them whenever possible. +* Use pyi files to type annotate third-party or extension modules. + +## Docstrings + +* All Docstrings should be written in [Numpy](https://numpydoc.readthedocs.io/en/latest/format.html) format. For a good tutorial on Docstrings, see [Documenting Python Code: A Complete Guide](https://realpython.com/documenting-python-code) +* In a Docstring, summarize function/method behavior and document its arguments, return value(s), side effects, exceptions raised, and restrictions +* Wrap Docstrings with triple double quotes (""") +* The description of the arguments must be indented + +```py + def some_method(name, print=False): + """This function does something + + Parameters + ---------- + name : str + The name to use + print: bool, optional + A flag used to print the name to the console, by default False + + Raises + ------ + KeyError + If name is not found + + Returns + ------- + int + The return code + """ + ... + return 0 +``` + +## Exceptions + +* Raise specific exceptions and catch specific exceptions, such as KeyError, ValueError, etc. +* Do not raise or catch just Exception, except in rare cases where this is unavoidable, such as a try/except block on the top-level loop of some long-running process. For a good tutorial on why this matters, see [The Most Diabolical Python Antipattern](https://realpython.com/the-most-diabolical-python-antipattern/). +* Minimize the amount of code in a try/except block. 
+ The larger the body of the try,
+    the more likely that an exception will be raised by a line of code that you didn’t expect to raise an exception.
+
+## Imports
+
+* Avoid creating circular imports by importing modules more specialized than the one you are editing
+* Relative imports are forbidden ([PEP-8](https://www.python.org/dev/peps/pep-0008/) only “highly discourages” them). Where absolutely needed, the `from __future__ import absolute_import` syntax should be used (see [PEP-328](https://www.python.org/dev/peps/pep-0328/))
+* Never use wildcard imports (`from <module> import *`). Always be explicit about what you're importing. Namespaces make code easier to read, so use them.
+* Break long imports using parentheses and indent by 4 spaces. Include the trailing comma after the last import and place the closing bracket on a separate line
+
+```py
+    from my_pkg.utils import (
+        some_utility_method_1,
+        some_utility_method_2,
+        some_utility_method_3,
+        some_utility_method_4,
+        some_utility_method_5,
+    )
+```
+
+* Imports should be written in the following order, separated by a blank line:
+    1. built-in modules
+    2. third-party modules
+    3. local application/library specific imports
+
+```py
+    import logging
+    import os
+    import typing as T
+
+    import pandas as pd
+    import numpy as np
+
+    import my_package
+    import my_package.my_module
+    from my_package.my_module import my_function, MyClass
+```
+
+* Even if a Python file is intended to be used as executable / script file only, it shall still be importable as a module, and its import should not have any side effects. Its main functionality shall hence be in a `main()` function, so that the code can be imported as a module for testing or being reused in the future:
+
+```py
+    def main():
+        ...
+
+    if __name__ == "__main__":
+        main()
+```
+
+## Unit-tests
+
+* Use pytest as the preferred testing framework.
+* The name of a test shall clearly express what is being tested. 
+* Each test should preferably check only one specific aspect. + +```py + # Bad + def test_smth(): + result = f() + assert isinstance(result, list) + assert result[0] == 1 + assert result[1] == 2 + assert result[2] == 3 + assert result[3] == 4 + + # Good + def test_smth_type(): + result = f() + assert isinstance(result, list), "Result should be list" + + def test_smth_values(): + result = f() + assert set(result) == set(expected), f"Result should be {set(expected)}" +``` + +## And finally: It is a bad idea to use + +* global variables. +* iterators where they can be replaced by vectorized operations. +* lambda where it is not required. +* map and lambda where it can be replaced by a simple list comprehension. +* multiple nested maps and lambdas. +* nested functions. They are hard to test and debug. diff --git a/demos/folder_for_demos.py b/demos/folder_for_demos.py new file mode 100644 index 00000000..d65b8afc --- /dev/null +++ b/demos/folder_for_demos.py @@ -0,0 +1 @@ +# ruff: noqa diff --git a/docs/source/_templates/custom-class.rst b/docs/source/_templates/custom-class.rst new file mode 100644 index 00000000..76bb02fa --- /dev/null +++ b/docs/source/_templates/custom-class.rst @@ -0,0 +1,31 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :show-inheritance: + + {% block methods %} + .. automethod:: __init__ + + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + {% for item in methods %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. 
autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/source/_templates/custom-module.rst b/docs/source/_templates/custom-module.rst new file mode 100644 index 00000000..914d438e --- /dev/null +++ b/docs/source/_templates/custom-module.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {%- if attributes %} + .. rubric:: {{ _('Module Attributes') }} + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {%- endblock %} + + {%- block functions %} + {%- if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {%- endblock %} + + {%- block classes %} + {%- if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: custom-class.rst + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {%- endblock %} + + {%- block exceptions %} + {%- if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {%- endblock %} + +{%- block modules %} +{%- if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: custom-module.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{%- endblock %} diff --git a/docs/source/api.rst b/docs/source/api.rst index cfafed1c..1a9fdf1b 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -1,7 +1,9 @@ -API Documentation -================= - -.. toctree:: - :maxdepth: 4 - - ospx +API Reference +============= + +.. 
autosummary:: + :toctree: _autosummary + :template: custom-module.rst + :recursive: + + ospx diff --git a/docs/source/conf.py b/docs/source/conf.py index caac5ec0..def4be32 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,74 +1,74 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -sys.path.insert(0, os.path.abspath("../../src")) - - -# -- Project information ----------------------------------------------------- - -project = "ospx" -copyright = "2024, DNV SE. All rights reserved." -author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" - -# The full version, including alpha/beta/rc tags -release = "0.2.14" - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "myst_parser", - "sphinx.ext.autodoc", - "sphinx.ext.napoleon", - "sphinx_argparse_cli", -] - -# The file extensions of source files. -source_suffix = { - ".rst": "restructuredtext", - ".md": "markdown", -} - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. 
-exclude_patterns = [] - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "furo" -html_logo = "_static/OSPx.svg" - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -autodoc_default_options = { - "member-order": "bysource", - "undoc-members": True, - "exclude-members": "__weakref__", -} -autodoc_preserve_defaults = True - -myst_heading_anchors = 3 +# ruff: noqa +# mypy: ignore-errors + +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import sys +from pathlib import Path + +sys.path.insert(0, str(Path("../../src").absolute())) + + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "ospx" +copyright = "2024, DNV SE. All rights reserved." 
+author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" + +# The full version, including alpha/beta/rc tags +release = "0.2.14" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "myst_parser", + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinx_argparse_cli", + "sphinx.ext.mathjax", + "matplotlib.sphinxext.plot_directive", + "sphinx.ext.autosummary", + "sphinx.ext.todo", +] + +# The file extensions of source files. +source_suffix = { + ".rst": "restructuredtext", + ".md": "markdown", +} + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_title = f"ospx {release}" +html_theme = "furo" +html_static_path = ["_static"] +html_logo = "_static/OSPx.svg" +autodoc_default_options = { + "member-order": "bysource", + "undoc-members": True, + "exclude-members": "__weakref__", +} +autodoc_preserve_defaults = True + +myst_heading_anchors = 3 diff --git a/pyproject.toml b/pyproject.toml index f430b4d7..2e823e37 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,166 +1,165 @@ -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" - -[project] -name = "ospx" -version = "0.2.14" -description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." 
-readme = "README.md" -requires-python = ">= 3.9" -license = {file = "LICENSE"} -authors = [ - {name = "Frank Lumpitzsch", email = "frank.lumpitzsch@dnv.com"}, - {name = "Claas Rostock", email = "claas.rostock@dnv.com"}, -] -maintainers = [ - {name = "Claas Rostock", email = "claas.rostock@dnv.com"}, -] -keywords = [] -classifiers = [ - "Development Status :: 4 - Beta", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX :: Linux", - "Operating System :: MacOS", - "Environment :: Console", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Topic :: Scientific/Engineering", - "Topic :: Software Development :: Libraries :: Python Modules", -] -dependencies = [ - "lxml>=5.2", - "numpy>=1.26,<2.0", - "pandas>=2.2", - "matplotlib>=3.9", - "graphviz>=0.20", - "dictIO>=0.3.4", -] - -[project.urls] -Homepage = "https://github.com/dnv-opensource/ospx" -Documentation = "https://dnv-opensource.github.io/ospx/README.html" -Repository = "https://github.com/dnv-opensource/ospx.git" -Issues = "https://github.com/dnv-opensource/ospx/issues" -Changelog = "https://github.com/dnv-opensource/ospx/blob/main/CHANGELOG.md" - -[project.scripts] -ospCaseBuilder = "ospx.cli.ospCaseBuilder:main" -importSystemStructure = "ospx.cli.importSystemStructure:main" -watchCosim = "ospx.watch.cli.watchCosim:main" - - -[tool.setuptools.packages.find] -where = ["src"] -exclude = ["test*"] - -[tool.ruff] -exclude = [ - ".git", - ".venv", - ".tox", - "build", - "dist", - "__pycache__", - "./docs/source/conf.py", -] -src = ["src"] -line-length = 120 -target-version = "py39" - -[tool.ruff.lint] -ignore = [ - "E501", # Line length too long - "D100", # Missing docstring in public module - "D104", # Missing docstring in public package - 
"D105", # Missing docstring in magic method - "D107", # Missing docstring in __init__ - "D202", # No blank lines allowed after function docstring - "D203", # 1 blank line required before class docstring - "D205", # 1 blank line required between summary line and description - "D212", # Multi-line docstring summary should start at the first line - "D213", # Multi-line docstring summary should start at the second line - # "N802", # Function name should be lowercase (uncomment if you want to allow Uppercase function names) - # "N803", # Argument name should be lowercase (uncomment if you want to allow Uppercase argument names) - "N806", # Variable in function should be lowercase (uncomment if you want to allow Uppercase variable names in functions) - # "N815", # Variable in class scope should not be mixedCase (uncomment if you want to allow mixedCase variable names in class scope) - # "N816", # Variable in global scope should not be mixedCase (uncomment if you want to allow mixedCase variable names in global scope) - "N999", # Invalid module name - ] -select = [ - "E", - "D", - "F", - "N", - "W", - "I", - "B", -] - -[tool.ruff.lint.pep8-naming] -ignore-names = [ - "test_*", - "setUp", - "tearDown", -] - -[tool.ruff.lint.pydocstyle] -convention = "numpy" - -[tool.ruff.lint.per-file-ignores] -"__init__.py" = ["I001"] -"./tests/*" = ["D"] - -[tool.ruff.format] -docstring-code-format = true - -[tool.pyright] -exclude = [ - ".git", - ".venv", - ".tox", - "build", - "dist", - "**/__pycache__", - "./docs/source/conf.py", - "./venv", -] -extraPaths = ["./src"] -typeCheckingMode = "basic" -useLibraryCodeForTypes = true -reportMissingParameterType = "error" -reportUnknownParameterType = "warning" -reportUnknownMemberType = "warning" -reportMissingTypeArgument = "error" -reportPropertyTypeMismatch = "error" -reportFunctionMemberAccess = "warning" -reportPrivateUsage = "warning" -reportTypeCommentUsage = "warning" -reportIncompatibleMethodOverride = "warning" 
-reportIncompatibleVariableOverride = "error" -reportInconsistentConstructor = "error" -reportOverlappingOverload = "warning" -reportUninitializedInstanceVariable = "warning" -reportCallInDefaultInitializer = "warning" -reportUnnecessaryIsInstance = "information" -reportUnnecessaryCast = "warning" -reportUnnecessaryComparison = "warning" -reportUnnecessaryContains = "warning" -reportUnusedCallResult = "warning" -reportUnusedExpression = "warning" -reportMatchNotExhaustive = "warning" -reportShadowedImports = "warning" -reportUntypedFunctionDecorator = "warning" -reportUntypedBaseClass = "error" -reportUntypedNamedTuple = "warning" -# Activate the following rules only locally and temporary, i.e. for a QA session. -# (For server side CI they are considered too strict.) -# reportConstantRedefinition = "warning" -# reportUnnecessaryTypeIgnoreComment = "information" -# reportImportCycles = "warning" -# reportImplicitStringConcatenation = "warning" +[build-system] +requires = [ + "hatchling", +] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.sdist] +only-include = [ + "src/ospx", + "tests", + ".coveragerc", + ".editorconfig", + "pytest.ini", + "ruff.toml", + "uv.lock", +] + +[tool.hatch.build.targets.wheel] +packages = [ + "src/ospx", +] + +[project] +name = "ospx" +version = "0.2.14" +description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." 
+readme = "README.md" +requires-python = ">= 3.10" +license = { file = "LICENSE" } +authors = [ + { name = "Frank Lumpitzsch", email = "frank.lumpitzsch@dnv.com" }, + { name = "Claas Rostock", email = "claas.rostock@dnv.com" }, +] +maintainers = [ + { name = "Claas Rostock", email = "claas.rostock@dnv.com" }, +] +keywords = [ +] +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering", + "Topic :: Software Development :: Libraries :: Python Modules", +] +dependencies = [ + "lxml>=5.2", + "numpy>=1.26,<2.0", + "pandas>=2.2", + "matplotlib>=3.9", + "graphviz>=0.20", + "dictIO>=0.3.4", +] + +[project.urls] +Homepage = "https://github.com/dnv-opensource/ospx" +Documentation = "https://dnv-opensource.github.io/ospx/README.html" +Repository = "https://github.com/dnv-opensource/ospx.git" +Issues = "https://github.com/dnv-opensource/ospx/issues" +Changelog = "https://github.com/dnv-opensource/ospx/blob/main/CHANGELOG.md" + + +[tool.uv] +dev-dependencies = [ + "pytest>=8.3", + "pytest-cov>=5.0", + "ruff>=0.6.3", + "pyright>=1.1.378", + "mypy>=1.11.1", + "sourcery>=1.22", + "pre-commit>=3.8", + "Sphinx>=8.0", + "sphinx-argparse-cli>=1.17", + "sphinx-autodoc-typehints>=2.2", + "myst-parser>=4.0", + "furo>=2024.8", + "nbconvert>=7.16", + "jupyter>=1.0", + "jupyter-client>=8.6", + "pandas-stubs>=2.2", + "types-lxml>=2024.4", +] +native-tls = true + + +[project.scripts] +ospCaseBuilder = "ospx.cli.ospCaseBuilder:main" +importSystemStructure = "ospx.cli.importSystemStructure:main" +watchCosim = "ospx.watch.cli.watchCosim:main" + + +[tool.mypy] +plugins = 
[ + "numpy.typing.mypy_plugin", +] +mypy_path = "stubs" +files = [ + "src", + "tests", + "demos", +] +exclude = [ + "^src/folder_to_be_excluded/", +] +check_untyped_defs = true +disable_error_code = [ + "misc", + "import-untyped", +] + + +[tool.pyright] +stubPath = "stubs" +include = [ + "src", + "tests", + "demos", +] +exclude = [ + "src/folder_to_be_excluded", +] + +typeCheckingMode = "basic" +useLibraryCodeForTypes = true +reportMissingParameterType = "error" +reportUnknownParameterType = "warning" +reportUnknownMemberType = "warning" # consider to set to `false` if you work a lot with matplotlib and pandas, which are both not properly typed and known to trigger this warning +reportMissingTypeArgument = "error" +reportPropertyTypeMismatch = "error" +reportFunctionMemberAccess = "warning" +reportPrivateUsage = "warning" +reportTypeCommentUsage = "warning" +reportIncompatibleMethodOverride = "warning" +reportIncompatibleVariableOverride = "error" +reportInconsistentConstructor = "error" +reportOverlappingOverload = "warning" +reportUninitializedInstanceVariable = "warning" +reportCallInDefaultInitializer = "warning" +reportUnnecessaryIsInstance = "information" +reportUnnecessaryCast = "warning" +reportUnnecessaryComparison = "warning" +reportUnnecessaryContains = "warning" +reportUnusedCallResult = "warning" +reportUnusedExpression = "warning" +reportMatchNotExhaustive = "warning" +reportShadowedImports = "warning" +reportUntypedFunctionDecorator = "warning" +reportUntypedClassDecorator = "warning" +reportUntypedBaseClass = "error" +reportUntypedNamedTuple = "warning" +reportUnnecessaryTypeIgnoreComment = "information" +# Activate the following rules only locally and temporary, i.e. for a QA session. +# (For server side CI they are considered too strict.) 
+# reportMissingTypeStubs = true +# reportConstantRedefinition = "warning" +# reportImportCycles = "warning" +# reportImplicitStringConcatenation = "warning" diff --git a/pytest.ini b/pytest.ini index 66613232..89ecb94b 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ -[pytest] -testpaths = - tests -addopts = --strict-markers -xfail_strict = True +[pytest] +testpaths = + tests +addopts = --strict-markers --verbose +xfail_strict = True diff --git a/qa.bat b/qa.bat index 5175308a..915e0c58 100644 --- a/qa.bat +++ b/qa.bat @@ -1,4 +1,5 @@ -ruff format . -ruff check . -pyright . -sourcery review . +uv run ruff format +uv run ruff check +uv run pyright +uv run mypy +uv run sourcery review . diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index bf43243c..00000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,12 +0,0 @@ -pytest>=8.3 -pytest-cov>=5.0 -ruff>=0.6.3 -pyright>=1.1.378 -Sphinx>=8.0 -sphinx-argparse-cli>=1.17 -myst-parser>=4.0 -furo>=2024.8 -sourcery>=1.22 - --r requirements.txt --r requirements-types.txt diff --git a/requirements-types.txt b/requirements-types.txt deleted file mode 100644 index eaccda56..00000000 --- a/requirements-types.txt +++ /dev/null @@ -1 +0,0 @@ -types-lxml>=2024.4 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 3df674d2..00000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -lxml>=5.2 -numpy>=1.26,<2.0 -pandas>=2.2 -matplotlib>=3.9 -graphviz>=0.20 - -dictIO>=0.3.4 - -# ../dictIO diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..c5ed14bd --- /dev/null +++ b/ruff.toml @@ -0,0 +1,149 @@ +exclude = [ + ".git", + ".venv", + "dist", + "*cache", + "**/__pycache__", + "src/folder_to_be_excluded", +] +src = [ + "src", +] +line-length = 120 +target-version = "py310" + +[lint] +# Settings for Ruff linter (invoked with `uv run ruff check`). 
+# Start by including _all_ Ruff lint rules, then ignore selected rules as needed +# https://docs.astral.sh/ruff/rules/ +select = [ + "ALL", +] +ignore = [ + # Ruff lint rules considered as too strict and hence ignored + "ANN101", # Missing type annotation for `self` argument in instance methods (NOTE: also listed as deprecated by Ruff) + "ANN102", # Missing type annotation for `cls` argument in class methods (NOTE: also listed as deprecated by Ruff) + "FIX002", # Line contains TODO, consider resolving the issue + "TD003", # Missing issue link on the line following a TODO + "S101", # Use of assert detected + "RET504", # Unnecessary assignment to `result` before `return` statement + "EM101", # Exception must not use a string literal, assign to variable first + "EM102", # Exception must not use an f-string literal, assign to variable first + "TRY003", # Avoid specifying long messages outside the exception class + "PLR1711", # Useless `return` statement at end of function + "G00", # Logging statement uses string formatting ('G00' covers all rules flagging string formatting in logging, e.g. G001, G002, etc.) + + # Ruff lint rules recommended to keep enabled, + # but which are typical candidates you might have a need to ignore, + # especially in the beginning or when refactoring an existing codebase, + # to avoid too many Ruff errors at once. + # -> Listed here for easy access and reference. + # (uncomment to ignore) + # "N803", # Argument name should be lowercase (NOTE: ignore to allow capital arguments (e.g X) in scientific code) + # "N806", # Variable in function should be lowercase (NOTE: ignore to allow capital variables (e.g X) in scientific code) + # "TCH002", # Move third-party import into a type-checking block + # "TCH003", # Move standard library import into a type-checking block + + # Ruff lint rules known to be in conflict with Ruff formatter. 
+ # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", # Tab-indentation (in conflict with Ruff formatter) + "E111", # Indentation with invalid multiple (in conflict with Ruff formatter) + "E114", # Indentation with invalid multiple comment (in conflict with Ruff formatter) + "E117", # Over-indented (in conflict with Ruff formatter) + "D206", # Indent with spaces (in conflict with Ruff formatter) + "D300", # Triple single quotes (in conflict with Ruff formatter) + "Q000", # Bad quotes in inline string (in conflict with Ruff formatter) + "Q001", # Bad quotes in multi-line string (in conflict with Ruff formatter) + "Q002", # Bad quotes in DocString (in conflict with Ruff formatter) + "Q003", # Avoidable escaped quote (in conflict with Ruff formatter) + "COM812", # Missing trailing comma (in conflict with Ruff formatter) + "COM819", # Prohibited trailing comma (in conflict with Ruff formatter) + "ISC001", # Single-line implicit string concatenation (in conflict with Ruff formatter) + "ISC002", # Multi-line implicit string concatenation (in conflict with Ruff formatter) +] +# File patterns to be excluded from Ruff lint +# (only needed for file patterns not already listed in the common `exclude` setting +# at top of this file, i.e. list here _additional_ excludes specific to Ruff lint.) +exclude = [ +] +allowed-confusables = [ + "×", # used as dim multiplication sign in comments, such as `19×16×15×16×8×6×3 = 10,506,240 possible combinations of parameters`. 
+] + +[lint.pep8-naming] +ignore-names = [ + "test_*", + "setUp", + "tearDown", +] + +[lint.pylint] +max-args = 7 + +[lint.flake8-pytest-style] +raises-require-match-for = [ + "BaseException", + "Exception", + "OSError", + "IOError", + "EnvironmentError", + "socket.error", +] + +[lint.per-file-ignores] +# `__init__.py` specific ignores +"__init__.py" = [ + "F401", # {name} imported but unused (NOTE: ignored as imports in `__init__.py` files are almost never used inside the module, but are intended for namespaces) + "I001", # Import block is un-sorted or un-formatted +] +# `tests` specific ignores +"tests/**/*" = [ + "D", # Missing docstrings + "ERA001", # Found commented-out code + "PT006", # Wrong type passed to first argument of `@pytest.mark.parametrize` (NOTE: ignored to allow parameters args as "args_1,arg_2,arg_3,..." + "S101", # Use of assert detected + "PLR2004", # Magic value used in comparison + "ANN201", # Missing return type annotation for public function + "ANN202", # Missing return type annotation for private function + "INP001", # File is part of an implicit namespace package. Add an `__init__.py`. (NOTE: tests are not intended to be a module, __init__.py hence not required.) 
+ "SLF001", # Private member accessed + "TRY004", # Prefer `TypeError` exception for invalid type +] +# `stubs` specific ignores +"stubs/**/*" = [ + "D", # Missing docstrings + "ERA001", # Found commented-out code + "SLF001", # Private member accessed + "F405", # {name} may be undefined, or defined from star imports: {module} + "F403", # from {name} import * used; unable to detect undefined names + "ANN", # Missing type annotation + "N", # Naming violations + "A001", # Variable {name} is shadowing a Python builtin + "A002", # Argument {name} is shadowing a Python builtin + "FBT001", # Boolean-typed positional argument in function definition + "PYI042", # Type alias {name} should be CamelCase + "PYI002", # complex if statement in stub + "PLR0913", # Too many arguments in function definition +] +# Jupyter notebook specific ignores +"**/*.ipynb" = [ + "D103", # Missing docstring in public function + "T201", # `print` found + "PGH003", # Use specific rule codes when ignoring type issues + "TCH002", # Move third-party import into a type-checking block +] +# `demos` specific ignores +"demos/**/*" = [ + "D", # Missing docstrings + "S101", # Use of assert detected + "PLR2004", # Magic value used in comparison + "INP001", # File is part of an implicit namespace package. Add an `__init__.py`. (NOTE: tutorials are not intended to be a module, __init__.py hence not required.) 
+ "T201", # `print` found + "E402", # Module level import not at top of cell +] + +[lint.pydocstyle] +convention = "numpy" + +[format] +docstring-code-format = true diff --git a/tests/.gitignore b/tests/.gitignore index ac2d943b..7e2701dc 100644 --- a/tests/.gitignore +++ b/tests/.gitignore @@ -1,2 +1,4 @@ +temp_* +~$temp* parsed.* reread.* \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index ee1cad92..99494aed 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,83 +1,79 @@ -import logging -import os -from glob import glob -from pathlib import Path -from shutil import rmtree - -import pytest -from pytest import LogCaptureFixture - -from ospx.utils.zip import add_file_content_to_zip - - -@pytest.fixture(scope="package", autouse=True) -def chdir(): - os.chdir(Path(__file__).parent.absolute() / "test_dicts") - - -@pytest.fixture(scope="package", autouse=True) -def test_dir(): - return Path(__file__).parent.absolute() - - -output_dirs = [ - "xyz", -] -output_files = [ - "parsed*", - "*.xml", - "*.fmu", - "*.csv", - "*.ssd", - "statisticsDict", - "watchDict", - "caseDict_imported_from_test_import_OspSystemStructure_xml", -] - - -@pytest.fixture(autouse=True) -def default_setup_and_teardown(caplog: LogCaptureFixture): - _remove_output_dirs_and_files() - _create_test_fmu() - yield - # _remove_test_fmu() - _remove_output_dirs_and_files() - - -def _remove_output_dirs_and_files(): - for folder in output_dirs: - rmtree(folder, ignore_errors=True) - for pattern in output_files: - for file in glob(pattern): - file = Path(file) - if not file.name.startswith("test_"): - file.unlink(missing_ok=True) - - -def _create_test_fmu(): - model_description_file: Path = Path("test_fmu_modelDescription.xml") - model_description: str = "" - with open(model_description_file, "r") as f: - model_description = f.read() - fmu_file: Path = Path("test_fmu.fmu") - fmu_file.unlink(missing_ok=True) - _ = add_file_content_to_zip( - zip_file=fmu_file, - 
file_name="modelDescription.xml", - file_content=model_description, - ) - - -def _remove_test_fmu(): - Path("test_fmu.fmu").unlink() - - -@pytest.fixture(autouse=True) -def setup_logging(caplog: LogCaptureFixture): - caplog.set_level("WARNING") - caplog.clear() - - -@pytest.fixture(autouse=True) -def logger(): - return logging.getLogger() +import logging +import os +from pathlib import Path +from shutil import rmtree + +import pytest + +from ospx.utils.zip import add_file_content_to_zip + + +@pytest.fixture(scope="package", autouse=True) +def chdir() -> None: + os.chdir(Path(__file__).parent.absolute() / "test_dicts") + + +@pytest.fixture(scope="package", autouse=True) +def test_dir() -> Path: + return Path(__file__).parent.absolute() + + +output_dirs = [] +output_files = [ + "parsed*", + "*.xml", + "*.fmu", + "*.csv", + "*.ssd", + "statisticsDict", + "watchDict", + "caseDict_imported_from_test_import_OspSystemStructure_xml", +] + + +@pytest.fixture(autouse=True) +def default_setup_and_teardown(): + _remove_output_dirs_and_files() + _create_test_fmu() + yield + # _remove_test_fmu() + _remove_output_dirs_and_files() + + +def _remove_output_dirs_and_files() -> None: + for folder in output_dirs: + rmtree(folder, ignore_errors=True) + for pattern in output_files: + for file in Path.cwd().glob(pattern): + _file = Path(file) + if not _file.name.startswith("test_"): + _file.unlink(missing_ok=True) + + +def _create_test_fmu(): + model_description_file: Path = Path("test_fmu_modelDescription.xml") + model_description: str = "" + with open(model_description_file, "r") as f: + model_description = f.read() + fmu_file: Path = Path("test_fmu.fmu") + fmu_file.unlink(missing_ok=True) + _ = add_file_content_to_zip( + zip_file=fmu_file, + file_name="modelDescription.xml", + file_content=model_description, + ) + + +def _remove_test_fmu(): + Path("test_fmu.fmu").unlink() + + +@pytest.fixture(autouse=True) +def setup_logging(caplog: pytest.LogCaptureFixture) -> None: + 
caplog.set_level("INFO") + caplog.clear() + + +@pytest.fixture(autouse=True) +def logger() -> logging.Logger: + return logging.getLogger() From 6e4b651bb10ea9484206cc20500e71257fd9f3dc Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 21 Oct 2024 17:33:33 +0200 Subject: [PATCH 12/32] .editorconfig : removed 'end_of_line = crlf' as this is handled via .gitattributes now --- .editorconfig | 1 - 1 file changed, 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index b46c31e5..6dda6af1 100644 --- a/.editorconfig +++ b/.editorconfig @@ -6,7 +6,6 @@ root = true [*] indent_style = space indent_size = 4 -end_of_line = crlf charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true From feaec392f3777e2c8aef373b6b6fdd350aa37a1a Mon Sep 17 00:00:00 2001 From: Claas Date: Tue, 22 Oct 2024 20:04:19 +0200 Subject: [PATCH 13/32] resolved issues raised by ruff --- MANIFEST.in | 2 - ruff.toml | 7 + src/ospx/__init__.py | 24 +- src/ospx/cli/importSystemStructure.py | 221 +++-- src/ospx/cli/ospCaseBuilder.py | 6 +- src/ospx/component.py | 567 +++++------ src/ospx/connection.py | 398 ++++---- src/ospx/connector.py | 307 +++--- src/ospx/fmi/__init__.py | 22 +- src/ospx/fmi/experiment.py | 39 +- src/ospx/fmi/fmu.py | 748 +++++++------- src/ospx/fmi/unit.py | 117 ++- src/ospx/fmi/variable.py | 514 +++++----- src/ospx/graph.py | 443 ++++----- src/ospx/importer.py | 601 ++++++------ src/ospx/ospCaseBuilder.py | 31 +- src/ospx/ospSimulationCase.py | 1288 +++++++++++++------------ src/ospx/simulation.py | 65 +- src/ospx/system.py | 366 +++---- src/ospx/utils/dateTime.py | 5 +- src/ospx/utils/dict.py | 114 +-- src/ospx/utils/logging.py | 28 +- src/ospx/utils/plotting.py | 33 +- src/ospx/utils/zip.py | 95 +- src/ospx/watch/cli/watchCosim.py | 37 +- src/ospx/watch/watchCosim.py | 873 ++++++++--------- tests/conftest.py | 30 +- tests/test_dicts/test_fmu.fmu | Bin 1227 -> 1227 bytes tests/test_fmu.py | 112 +-- tests/test_ospCaseBuilder.py | 106 +- tests/test_watchCosim.py | 
95 +- tox.ini | 27 - 32 files changed, 3675 insertions(+), 3646 deletions(-) delete mode 100644 MANIFEST.in delete mode 100644 tox.ini diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index c0d3bae1..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -graft src -recursive-exclude __pycache__ *.py[cod] diff --git a/ruff.toml b/ruff.toml index c5ed14bd..79708cc6 100644 --- a/ruff.toml +++ b/ruff.toml @@ -20,6 +20,13 @@ select = [ "ALL", ] ignore = [ + # Ruff lint rules temporarily ignored, but which should be reactivated and resolved in the future. + "D", # Missing docstrings <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 + "N999", # Invalid module name <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 + "C901", # Function is too complex <- @TODO: reactivate and resolve print statements @CLAROS, 2024-10-21 + "PLR0911", # Too many return statements <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 + "PLR0912", # Too many branches <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 + "PLR0915", # Too many statements <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 # Ruff lint rules considered as too strict and hence ignored "ANN101", # Missing type annotation for `self` argument in instance methods (NOTE: also listed as deprecated by Ruff) "ANN102", # Missing type annotation for `cls` argument in class methods (NOTE: also listed as deprecated by Ruff) diff --git a/src/ospx/__init__.py b/src/ospx/__init__.py index 4d15e50b..9522e3c2 100644 --- a/src/ospx/__init__.py +++ b/src/ospx/__init__.py @@ -1,12 +1,12 @@ -from ospx.simulation import Simulation as Simulation # noqa: F401 -from ospx.connector import Connector as Connector # noqa: F401 -from ospx.connection import ( # noqa: F401 - Endpoint as Endpoint, - Connection as Connection, -) -from ospx.component import Component as Component # noqa: F401 -from ospx.system import System as System # noqa: F401 -from ospx.ospSimulationCase import OspSimulationCase as 
OspSimulationCase # noqa: F401 -from ospx.graph import Graph as Graph # noqa: F401 -from ospx.ospCaseBuilder import OspCaseBuilder as OspCaseBuilder # noqa: F401 -from ospx.importer import OspSystemStructureImporter as OspSystemStructureImporter # noqa: F401 +from ospx.simulation import Simulation +from ospx.connector import Connector +from ospx.connection import ( + Endpoint, + Connection, +) +from ospx.component import Component +from ospx.system import System +from ospx.ospSimulationCase import OspSimulationCase +from ospx.graph import Graph +from ospx.ospCaseBuilder import OspCaseBuilder +from ospx.importer import OspSystemStructureImporter diff --git a/src/ospx/cli/importSystemStructure.py b/src/ospx/cli/importSystemStructure.py index ac8d6674..3772c917 100644 --- a/src/ospx/cli/importSystemStructure.py +++ b/src/ospx/cli/importSystemStructure.py @@ -1,112 +1,109 @@ -#!/usr/bin/env python -# coding: utf-8 - -import argparse -import logging -from argparse import ArgumentParser -from pathlib import Path -from typing import Union - -from ospx import OspSystemStructureImporter -from ospx.utils.logging import configure_logging - -logger = logging.getLogger(__name__) - - -def _argparser() -> argparse.ArgumentParser: - parser = ArgumentParser( - prog="importSystemStructure", - usage="%(prog)s systemStructureFile [options [args]]", - epilog="_________________importSystemStructure___________________", - prefix_chars="-", - add_help=True, - description=("Imports an existing OspSystemStructure.xml and translates it into a caseDict."), - ) - - _ = parser.add_argument( - "systemStructureFile", - metavar="systemStructureFile", - type=str, - help="name of the system structure file", - default="OspSystemStructure.xml", - ) - - console_verbosity = parser.add_mutually_exclusive_group(required=False) - - _ = console_verbosity.add_argument( - "-q", - "--quiet", - action="store_true", - help=("console output will be quiet."), - default=False, - ) - - _ = 
console_verbosity.add_argument( - "-v", - "--verbose", - action="store_true", - help=("console output will be verbose."), - default=False, - ) - - _ = parser.add_argument( - "--log", - action="store", - type=str, - help="name of log file. If specified, this will activate logging to file.", - default=None, - required=False, - ) - - _ = parser.add_argument( - "--log-level", - action="store", - type=str, - help="log level applied to logging to file.", - choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], - default="WARNING", - required=False, - ) - - return parser - - -def main(): - """Entry point for console script as configured in setup.cfg. - - Runs the command line interface and parses arguments and options entered on the console. - """ - - parser = _argparser() - args = parser.parse_args() - - # Configure Logging - log_level_console: str = "WARNING" - if any([args.quiet, args.verbose]): - log_level_console = "ERROR" if args.quiet else log_level_console - log_level_console = "DEBUG" if args.verbose else log_level_console - # ..to file - log_file: Union[Path, None] = Path(args.log) if args.log else None - log_level_file: str = args.log_level - configure_logging(log_level_console, log_file, log_level_file) - - system_structure_file: Path = Path(args.systemStructureFile) - - # Check whether system structure file exists - if not system_structure_file.is_file(): - logger.error(f"importSystemStructure: File {system_structure_file} not found.") - return - - logger.info( - f"Start importSystemStructure.py with following arguments:\n" - f"\t system_structure_file: \t{system_structure_file}" - ) - - # Invoke API - OspSystemStructureImporter.import_system_structure(system_structure_file) - - return - - -if __name__ == "__main__": - main() +#!/usr/bin/env python + +import argparse +import logging +from argparse import ArgumentParser +from pathlib import Path + +from ospx import OspSystemStructureImporter +from ospx.utils.logging import configure_logging + +logger = 
logging.getLogger(__name__) + + +def _argparser() -> argparse.ArgumentParser: + parser = ArgumentParser( + prog="importSystemStructure", + usage="%(prog)s systemStructureFile [options [args]]", + epilog="_________________importSystemStructure___________________", + prefix_chars="-", + add_help=True, + description=("Imports an existing OspSystemStructure.xml and translates it into a caseDict."), + ) + + _ = parser.add_argument( + "systemStructureFile", + metavar="systemStructureFile", + type=str, + help="name of the system structure file", + default="OspSystemStructure.xml", + ) + + console_verbosity = parser.add_mutually_exclusive_group(required=False) + + _ = console_verbosity.add_argument( + "-q", + "--quiet", + action="store_true", + help=("console output will be quiet."), + default=False, + ) + + _ = console_verbosity.add_argument( + "-v", + "--verbose", + action="store_true", + help=("console output will be verbose."), + default=False, + ) + + _ = parser.add_argument( + "--log", + action="store", + type=str, + help="name of log file. If specified, this will activate logging to file.", + default=None, + required=False, + ) + + _ = parser.add_argument( + "--log-level", + action="store", + type=str, + help="log level applied to logging to file.", + choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"], + default="WARNING", + required=False, + ) + + return parser + + +def main() -> None: + """Entry point for console script as configured in setup.cfg. + + Runs the command line interface and parses arguments and options entered on the console. 
+ """ + parser = _argparser() + args = parser.parse_args() + + # Configure Logging + log_level_console: str = "WARNING" + if any([args.quiet, args.verbose]): + log_level_console = "ERROR" if args.quiet else log_level_console + log_level_console = "DEBUG" if args.verbose else log_level_console + # ..to file + log_file: Path | None = Path(args.log) if args.log else None + log_level_file: str = args.log_level + configure_logging(log_level_console, log_file, log_level_file) + + system_structure_file: Path = Path(args.systemStructureFile) + + # Check whether system structure file exists + if not system_structure_file.is_file(): + logger.error(f"importSystemStructure: File {system_structure_file} not found.") + return + + logger.info( + f"Start importSystemStructure.py with following arguments:\n" + f"\t system_structure_file: \t{system_structure_file}" + ) + + # Invoke API + OspSystemStructureImporter.import_system_structure(system_structure_file) + + return + + +if __name__ == "__main__": + main() diff --git a/src/ospx/cli/ospCaseBuilder.py b/src/ospx/cli/ospCaseBuilder.py index 6ce05f7d..ac9388b4 100644 --- a/src/ospx/cli/ospCaseBuilder.py +++ b/src/ospx/cli/ospCaseBuilder.py @@ -1,10 +1,8 @@ #!/usr/bin/env python -# coding: utf-8 import argparse import logging from pathlib import Path -from typing import Union from ospx import OspCaseBuilder from ospx.utils.logging import configure_logging @@ -98,7 +96,7 @@ def _argparser() -> argparse.ArgumentParser: return parser -def main(): +def main() -> None: """Entry point for console script as configured in setup.cfg. Runs the command line interface and parses arguments and options entered on the console. 
@@ -114,7 +112,7 @@ def main(): log_level_console = "DEBUG" if args.verbose else log_level_console # ..to file - log_file: Union[Path, None] = Path(args.log) if args.log else None + log_file: Path | None = Path(args.log) if args.log else None log_level_file: str = args.log_level configure_logging(log_level_console, log_file, log_level_file) diff --git a/src/ospx/component.py b/src/ospx/component.py index a23a6b09..669a47cc 100644 --- a/src/ospx/component.py +++ b/src/ospx/component.py @@ -1,282 +1,285 @@ -import logging -from copy import deepcopy -from dataclasses import dataclass, field -from pathlib import Path -from typing import Any, MutableMapping, Union - -from dictIO import DictWriter -from dictIO.utils.counter import BorgCounter - -from ospx import Connector -from ospx.fmi import FMU, ScalarVariable, Unit - -__ALL__ = ["Component"] - -logger = logging.getLogger(__name__) - - -class Component: - r"""A component is an instance of a (component-) model. - - A component represents an instance of a (component-) model. Any system structure can contain an arbitrary number of components. - Important here is, that multiple components in a system structure can be instances of one and the same model. - In practical terms this means that multiple components can refer to the same physical FMU file. \n - As components are instances of a model (FMU), they inherit the start values defined in the FMU's modelDescription file upon instantiation; \n - howevere, being an instance, each component can alter and overwrite these start values. - This is accomplished using the 'initialize' section inside a 'component' element in the ospx case dict. \n - See https://dnv-opensource.github.io/ospx/fileFormat.caseDict.html - - Equivalent terms to 'component' are: \n - \t 'Simulator' in OSP. See https://open-simulation-platform.github.io/libcosim/configuration#simulator \n - \t 'Simulation model' in FMI for co-simulation. 
See https://github.com/modelica/fmi-standard/releases/download/v2.0.3/FMI-Specification-2.0.3.pdf \n - \t 'Component' in SSP. See https://ssp-standard.org/publications/SSP10/SystemStructureAndParameterization10.pdf - """ - - def __init__(self, name: str, properties: MutableMapping[Any, Any]): - self.name: str = name - self.generate_proxy = False - self.fmu: FMU - self.step_size: Union[float, None] = None - self._initial_values: dict[str, ScalarVariable] = {} - self._connectors: dict[str, Connector] = {} - self.generate_proxy: bool = False - self.remote_access: Union[RemoteAccess, None] = None - self.counter = BorgCounter() - self._units: dict[str, Unit] - self._variables: dict[str, ScalarVariable] - - self._read_fmu(properties) - self._read_step_size(properties) - - self._read_generate_proxy(properties) - self._read_remote_access(properties) - if self.generate_proxy: - self._generate_proxy() - - self._read_initialize(properties) - self._init_units() - self._init_variables() - - self._read_connectors(properties) - - def _read_fmu(self, properties: MutableMapping[Any, Any]): - if "fmu" not in properties: - msg = f"component {self.name}: 'fmu' element missing in case dict." - logger.exception(msg) - raise ValueError(msg) - # Read name of fmu file from component properties. 
- # Note: Relative paths (if existing) are omitted from the fmu name, as it is by convention expected - # that the fmu has already been copied from the library into the current working directory (=case folder) - fmu_file = Path(properties["fmu"]) - if not fmu_file.exists(): - logger.exception(f"component {self.name}: referenced FMU file {fmu_file} not found.") - raise FileNotFoundError(fmu_file) - self.fmu = FMU(fmu_file) - if self.fmu.default_experiment and not self.step_size: - self.step_size = self.fmu.default_experiment.step_size - - def _read_step_size(self, properties: MutableMapping[Any, Any]): - if "stepSize" not in properties: - return - self.step_size = float(properties["stepSize"]) - - def _read_initialize(self, properties: MutableMapping[Any, Any]): - if "initialize" not in properties: - return - for variable_name, variable_properties in properties["initialize"].items(): - variable = ScalarVariable(name=variable_name) - if "causality" in variable_properties: - variable.causality = variable_properties["causality"] - if "variability" in variable_properties: - variable.variability = variable_properties["variability"] - if "start" in variable_properties: - variable.start = variable_properties["start"] - self._initial_values[variable.name] = variable - - def _read_connectors(self, properties: MutableMapping[Any, Any]): - if "connectors" not in properties: - return - for connector_name, connector_properties in properties["connectors"].items(): - connector = Connector(name=connector_name) - if "variable" in connector_properties: - connector.variable = connector_properties["variable"] - if "variableGroup" in connector_properties: - connector.variable_group = connector_properties["variableGroup"] - if "type" in connector_properties: - connector.type = connector_properties["type"] - self._connectors[connector.name] = connector - - def _read_generate_proxy(self, properties: MutableMapping[Any, Any]): - if "generate_proxy" not in properties: - return - 
self.generate_proxy = properties["generate_proxy"] - - def _read_remote_access(self, properties: MutableMapping[Any, Any]): - if "remoteAccess" not in properties: - return - if "host" in properties["remoteAccess"] and "port" in properties["remoteAccess"]: - self.remote_access = RemoteAccess( - host=properties["remoteAccess"]["host"], - port=properties["remoteAccess"]["port"], - ) - - def _generate_proxy(self): - if not self.remote_access: - logger.error( - f"component {self.name}: 'generate_proxy' set to True, but the 'remoteAccess' element is not correctly defined." - ) - elif not self.remote_access.host: - logger.error(f"component {self.name}: 'remoteAccess' element is defined, but host is not specified.") - elif not self.remote_access.port: - logger.error(f"component {self.name}: 'remoteAccess' element is defined, but port is not specified.") - else: - self.fmu = self.fmu.proxify(self.remote_access.host, self.remote_access.port) - # if NTNU-IHB fmu-proxy code is used, use '-proxy' reference - self.name = f"{self.name}-proxy" - # self.name = self.fmu.file.stem - - def _init_units(self): - self._units = deepcopy(self.fmu.units) - - def _init_variables(self): - self._variables = deepcopy(self.fmu.variables) - - for variable_name, variable in self._initial_values.items(): - if variable.causality: - self._variables[variable_name].causality = variable.causality - if variable.variability: - self._variables[variable_name].variability = variable.variability - if variable.start: - self._variables[variable_name].start = variable.start - - @property - def variables_with_start_values(self) -> dict[str, ScalarVariable]: - """Returns a dict with all scalar variables for which start values (initial values) - are defined in the component. - - Returns - ------- - dict[str, ScalarVariable] - dict with all scalar variables with start values defined. 
- """ - return self._initial_values - - @property - def units(self) -> dict[str, Unit]: - """Returns a dict with all units defined in the component. - - Returns - ------- - dict[str, Unit] - dict with all units - """ - return self._units - - @property - def variables(self) -> dict[str, ScalarVariable]: - """Returns a dict with all scalar variables defined in the component. - - Returns - ------- - dict[str, ScalarVariable] - dict with all scalar variables - """ - return self._variables - - @property - def connectors(self) -> dict[str, Connector]: - """Returns a dict with all connectors defined by the component. - - Returns - ------- - dict[str, Connector] - dict with all connectors - """ - return self._connectors - - def write_osp_model_description_xml(self): # sourcery skip: merge-dict-assign - """Write the _OspModelDescription.xml file in the current working directory.""" - osp_model_description_file = self.fmu.file.parent.absolute() / f"{self.name}_OspModelDescription.xml" - self._clean(osp_model_description_file) - - osp_model_description = {} - - # Unit Definitions - unit_definitions = {} - for unit in self.units.values(): - unit_definition = {"_attributes": {}} - unit_definition["_attributes"]["name"] = unit.name - if unit.base_unit: - unit_definition["BaseUnit"] = {"_attributes": {}} - if unit.base_unit.kg: - unit_definition["BaseUnit"]["_attributes"]["kg"] = unit.base_unit.kg - if unit.base_unit.m: - unit_definition["BaseUnit"]["_attributes"]["m"] = unit.base_unit.m - if unit.base_unit.s: - unit_definition["BaseUnit"]["_attributes"]["s"] = unit.base_unit.s - if unit.base_unit.A: - unit_definition["BaseUnit"]["_attributes"]["A"] = unit.base_unit.A - if unit.base_unit.K: - unit_definition["BaseUnit"]["_attributes"]["K"] = unit.base_unit.K - if unit.base_unit.mol: - unit_definition["BaseUnit"]["_attributes"]["mol"] = unit.base_unit.mol - if unit.base_unit.cd: - unit_definition["BaseUnit"]["_attributes"]["cd"] = unit.base_unit.cd - if unit.base_unit.rad: - 
unit_definition["BaseUnit"]["_attributes"]["rad"] = unit.base_unit.rad - if unit.base_unit.factor: - unit_definition["BaseUnit"]["_attributes"]["factor"] = unit.base_unit.factor - if unit.base_unit.offset: - unit_definition["BaseUnit"]["_attributes"]["offset"] = unit.base_unit.offset - if unit.display_unit: - unit_definition["DisplayUnit"] = {"_attributes": {}} - unit_definition["DisplayUnit"]["_attributes"]["name"] = unit.display_unit.name - unit_definition["DisplayUnit"]["_attributes"]["factor"] = unit.display_unit.factor - unit_definition["DisplayUnit"]["_attributes"]["offset"] = unit.display_unit.offset - unit_definitions[f"{self.counter():06d}_Unit"] = unit_definition - osp_model_description["UnitDefinitions"] = unit_definitions - - # Variable Groups - variable_groups = {} - for variable_name, variable in self.variables.items(): - if not variable.quantity: - logger.warning(f"component {self.name}: no quantity defined for variable {variable_name}") - if not variable.unit: - logger.warning(f"component {self.name}: no unit defined for variable {variable_name}") - quantity_name = variable.quantity or "UNKNOWN" - quantity_unit = variable.unit or "UNKNOWN" - variable_groups[f"{self.counter():06d}_Generic"] = { - "_attributes": {"name": quantity_name}, - quantity_name: { - "_attributes": {"name": quantity_name}, - "Variable": { - "_attributes": { - "ref": variable_name, - "unit": quantity_unit, - } - }, - }, - } - osp_model_description["VariableGroups"] = variable_groups - - # _xmlOpts - osp_model_description["_xmlOpts"] = { - "_nameSpaces": {"osp": "https://opensimulationplatform.com/xsd/OspModelDescription-1.0.0.xsd"}, - "_rootTag": "ospModelDescription", - } - - DictWriter.write(osp_model_description, osp_model_description_file) - - def _clean(self, file_to_remove: Union[str, Path]): - """Clean up single file.""" - if isinstance(file_to_remove, str): - file_to_remove = Path.cwd() / file_to_remove - file_to_remove.unlink(missing_ok=True) - - -@dataclass() -class 
RemoteAccess: - """Data class holding the host and port information for a remote connection.""" - - host: str = field(default_factory=lambda: "") - port: int = 0 +import logging +from collections.abc import MutableMapping +from copy import deepcopy +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +from dictIO import DictWriter +from dictIO.utils.counter import BorgCounter + +from ospx import Connector +from ospx.fmi import FMU, ScalarVariable, Unit + +__ALL__ = ["Component"] + +logger = logging.getLogger(__name__) + + +class Component: + r"""A component is an instance of a (component-) model. + + A component represents an instance of a (component-) model. + Any system structure can contain an arbitrary number of components. + Important here is, that multiple components in a system structure can be instances of one and the same model. + In practical terms this means that multiple components can refer to the same physical FMU file. \n + As components are instances of a model (FMU), they inherit the start values defined + in the FMU's modelDescription file upon instantiation; \n + howevere, being an instance, each component can alter and overwrite these start values. + This is accomplished using the 'initialize' section inside a 'component' element in the ospx case dict. \n + See https://dnv-opensource.github.io/ospx/fileFormat.caseDict.html + + Equivalent terms to 'component' are: \n + \t 'Simulator' in OSP. See https://open-simulation-platform.github.io/libcosim/configuration#simulator \n + \t 'Simulation model' in FMI for co-simulation. See https://github.com/modelica/fmi-standard/releases/download/v2.0.3/FMI-Specification-2.0.3.pdf \n + \t 'Component' in SSP. 
See https://ssp-standard.org/publications/SSP10/SystemStructureAndParameterization10.pdf + """ # noqa: E501 + + def __init__(self, name: str, properties: MutableMapping[Any, Any]) -> None: + self.name: str = name + self.fmu: FMU + self.step_size: float | None = None + self._initial_values: dict[str, ScalarVariable] = {} + self._connectors: dict[str, Connector] = {} + self.generate_proxy: bool = False + self.remote_access: RemoteAccess | None = None + self.counter = BorgCounter() + self._units: dict[str, Unit] + self._variables: dict[str, ScalarVariable] + + self._read_fmu(properties) + self._read_step_size(properties) + + self._read_generate_proxy(properties) + self._read_remote_access(properties) + if self.generate_proxy: + self._generate_proxy() + + self._read_initialize(properties) + self._init_units() + self._init_variables() + + self._read_connectors(properties) + + def _read_fmu(self, properties: MutableMapping[Any, Any]) -> None: + if "fmu" not in properties: + msg = f"component {self.name}: 'fmu' element missing in case dict." + logger.exception(msg) + raise ValueError(msg) + # Read name of fmu file from component properties. 
+ # Note: Relative paths (if existing) are omitted from the fmu name, as it is by convention expected + # that the fmu has already been copied from the library into the current working directory (=case folder) + fmu_file = Path(properties["fmu"]) + if not fmu_file.exists(): + logger.exception(f"component {self.name}: referenced FMU file {fmu_file} not found.") + raise FileNotFoundError(fmu_file) + self.fmu = FMU(fmu_file) + if self.fmu.default_experiment and not self.step_size: + self.step_size = self.fmu.default_experiment.step_size + + def _read_step_size(self, properties: MutableMapping[Any, Any]) -> None: + if "stepSize" not in properties: + return + self.step_size = float(properties["stepSize"]) + + def _read_initialize(self, properties: MutableMapping[Any, Any]) -> None: + if "initialize" not in properties: + return + for variable_name, variable_properties in properties["initialize"].items(): + variable = ScalarVariable(name=variable_name) + if "causality" in variable_properties: + variable.causality = variable_properties["causality"] + if "variability" in variable_properties: + variable.variability = variable_properties["variability"] + if "start" in variable_properties: + variable.start = variable_properties["start"] + self._initial_values[variable.name] = variable + + def _read_connectors(self, properties: MutableMapping[Any, Any]) -> None: + if "connectors" not in properties: + return + for connector_name, connector_properties in properties["connectors"].items(): + connector = Connector(name=connector_name) + if "variable" in connector_properties: + connector.variable = connector_properties["variable"] + if "variableGroup" in connector_properties: + connector.variable_group = connector_properties["variableGroup"] + if "type" in connector_properties: + connector.type = connector_properties["type"] + self._connectors[connector.name] = connector + + def _read_generate_proxy(self, properties: MutableMapping[Any, Any]) -> None: + if "generate_proxy" not in 
properties: + return + self.generate_proxy = properties["generate_proxy"] + + def _read_remote_access(self, properties: MutableMapping[Any, Any]) -> None: + if "remoteAccess" not in properties: + return + if "host" in properties["remoteAccess"] and "port" in properties["remoteAccess"]: + self.remote_access = RemoteAccess( + host=properties["remoteAccess"]["host"], + port=properties["remoteAccess"]["port"], + ) + + def _generate_proxy(self) -> None: + if not self.remote_access: + logger.error( + f"component {self.name}: 'generate_proxy' set to True, " + "but the 'remoteAccess' element is not correctly defined." + ) + elif not self.remote_access.host: + logger.error(f"component {self.name}: 'remoteAccess' element is defined, but host is not specified.") + elif not self.remote_access.port: + logger.error(f"component {self.name}: 'remoteAccess' element is defined, but port is not specified.") + else: + self.fmu = self.fmu.proxify(self.remote_access.host, self.remote_access.port) + # if NTNU-IHB fmu-proxy code is used, use '-proxy' reference + self.name = f"{self.name}-proxy" + + def _init_units(self) -> None: + self._units = deepcopy(self.fmu.units) + + def _init_variables(self) -> None: + self._variables = deepcopy(self.fmu.variables) + + for variable_name, variable in self._initial_values.items(): + if variable.causality: + self._variables[variable_name].causality = variable.causality + if variable.variability: + self._variables[variable_name].variability = variable.variability + if variable.start: + self._variables[variable_name].start = variable.start + + @property + def variables_with_start_values(self) -> dict[str, ScalarVariable]: + """Returns a dict with all scalar variables for which start values (initial values) + are defined in the component. + + Returns + ------- + dict[str, ScalarVariable] + dict with all scalar variables with start values defined. 
+ """ + return self._initial_values + + @property + def units(self) -> dict[str, Unit]: + """Returns a dict with all units defined in the component. + + Returns + ------- + dict[str, Unit] + dict with all units + """ + return self._units + + @property + def variables(self) -> dict[str, ScalarVariable]: + """Returns a dict with all scalar variables defined in the component. + + Returns + ------- + dict[str, ScalarVariable] + dict with all scalar variables + """ + return self._variables + + @property + def connectors(self) -> dict[str, Connector]: + """Returns a dict with all connectors defined by the component. + + Returns + ------- + dict[str, Connector] + dict with all connectors + """ + return self._connectors + + def write_osp_model_description_xml(self) -> None: + """Write the _OspModelDescription.xml file in the current working directory.""" + # sourcery skip: extract-method, merge-dict-assign + osp_model_description_file = self.fmu.file.parent.absolute() / f"{self.name}_OspModelDescription.xml" + self._clean(osp_model_description_file) + + osp_model_description: dict[str, Any] = {} + + # Unit Definitions + unit_definitions: dict[str, dict[str, dict[str, Any]]] = {} + for unit in self.units.values(): + unit_definition: dict[str, dict[str, Any]] = {"_attributes": {}} + unit_definition["_attributes"]["name"] = unit.name + if unit.base_unit: + unit_definition["BaseUnit"] = {"_attributes": {}} + if unit.base_unit.kg: + unit_definition["BaseUnit"]["_attributes"]["kg"] = unit.base_unit.kg + if unit.base_unit.m: + unit_definition["BaseUnit"]["_attributes"]["m"] = unit.base_unit.m + if unit.base_unit.s: + unit_definition["BaseUnit"]["_attributes"]["s"] = unit.base_unit.s + if unit.base_unit.A: + unit_definition["BaseUnit"]["_attributes"]["A"] = unit.base_unit.A + if unit.base_unit.K: + unit_definition["BaseUnit"]["_attributes"]["K"] = unit.base_unit.K + if unit.base_unit.mol: + unit_definition["BaseUnit"]["_attributes"]["mol"] = unit.base_unit.mol + if 
unit.base_unit.cd: + unit_definition["BaseUnit"]["_attributes"]["cd"] = unit.base_unit.cd + if unit.base_unit.rad: + unit_definition["BaseUnit"]["_attributes"]["rad"] = unit.base_unit.rad + if unit.base_unit.factor: + unit_definition["BaseUnit"]["_attributes"]["factor"] = unit.base_unit.factor + if unit.base_unit.offset: + unit_definition["BaseUnit"]["_attributes"]["offset"] = unit.base_unit.offset + if unit.display_unit: + unit_definition["DisplayUnit"] = {"_attributes": {}} + unit_definition["DisplayUnit"]["_attributes"]["name"] = unit.display_unit.name + unit_definition["DisplayUnit"]["_attributes"]["factor"] = unit.display_unit.factor + unit_definition["DisplayUnit"]["_attributes"]["offset"] = unit.display_unit.offset + unit_definitions[f"{self.counter():06d}_Unit"] = unit_definition + osp_model_description["UnitDefinitions"] = unit_definitions + + # Variable Groups + variable_groups = {} + for variable_name, variable in self.variables.items(): + if not variable.quantity: + logger.warning(f"component {self.name}: no quantity defined for variable {variable_name}") + if not variable.unit: + logger.warning(f"component {self.name}: no unit defined for variable {variable_name}") + quantity_name = variable.quantity or "UNKNOWN" + quantity_unit = variable.unit or "UNKNOWN" + variable_groups[f"{self.counter():06d}_Generic"] = { + "_attributes": {"name": quantity_name}, + quantity_name: { + "_attributes": {"name": quantity_name}, + "Variable": { + "_attributes": { + "ref": variable_name, + "unit": quantity_unit, + } + }, + }, + } + osp_model_description["VariableGroups"] = variable_groups + + # _xmlOpts + osp_model_description["_xmlOpts"] = { + "_nameSpaces": {"osp": "https://opensimulationplatform.com/xsd/OspModelDescription-1.0.0.xsd"}, + "_rootTag": "ospModelDescription", + } + + DictWriter.write(osp_model_description, osp_model_description_file) + + def _clean(self, file_to_remove: str | Path) -> None: + """Clean up single file.""" + if isinstance(file_to_remove, 
str): + file_to_remove = Path.cwd() / file_to_remove + file_to_remove.unlink(missing_ok=True) + + +@dataclass() +class RemoteAccess: + """Data class holding the host and port information for a remote connection.""" + + host: str = field(default_factory=lambda: "") + port: int = 0 diff --git a/src/ospx/connection.py b/src/ospx/connection.py index a17c2a03..719d3363 100644 --- a/src/ospx/connection.py +++ b/src/ospx/connection.py @@ -1,199 +1,199 @@ -import logging -from typing import Union - -from ospx.component import Component -from ospx.connector import Connector -from ospx.fmi.variable import ScalarVariable - -__ALL__ = ["Endpoint", "Connection"] - -logger = logging.getLogger(__name__) - - -class Endpoint: - """Endpoints relate each side of a connection to distinct variables or connectors. - - An endpoint is for a connection what a connector is for a component. - A connection has two endpoints, one for each side. - Connection endpoints are hence the 'counterparts' to component's connectors. - """ - - def __init__( - self, - component: Component, - connector: Union[Connector, None] = None, - variable: Union[ScalarVariable, None] = None, - ): - self.component: Component = component - self._connector: Union[Connector, None] = None - self._variable: Union[ScalarVariable, None] = None - if connector: - self.connector = connector - if variable: - self.variable = variable - - @property - def connector(self) -> Union[Connector, None]: - """Returns the connector this endpoint refers to, if defined. - - Returns - ------- - Union[Connector, None] - the connector, if defined. Otherwise None. - """ - return self._connector - - @connector.setter - def connector(self, connector: Connector): - """Set the connector this endpoint shall refer to. 
- - Parameters - ---------- - connector : Connector - the connector - """ - if self._variable: - msg = ( - f"Inconsistency: Connection endpoint defines both connector and variable.\n" - f"connector: {connector.name}\nvariable: {self._variable.name}\n" - "connector is used. variable is omitted." - ) - logger.warning(msg) - self._variable = None - self._connector = connector - - @property - def variable(self) -> Union[ScalarVariable, None]: - """Returns the scalar variable this endpoint refers to, if defined. - - Returns - ------- - Union[ScalarVariable, None] - the scalar variable, if defined. Otherwise None. - """ - return self._variable - - @variable.setter - def variable(self, variable: ScalarVariable): - """Set the scalar variable this endpoint shall refer to. - - Parameters - ---------- - variable : ScalarVariable - the scalar variable - """ - if self._connector: - msg = ( - f"Inconsistency: Connection endpoint defines both connector and variable.\n" - f"connector: {self._connector.name}\nvariable: {variable.name}\n" - "connector is omitted. variable is used." - ) - logger.warning(msg) - self._connector = None - self._variable = variable - - @property - def variable_name(self) -> str: - """Returns the name of the scalar variable this endpoint refers to. - - Returns - ------- - str - the name of the scalar variable. - """ - if self._connector: - return self._connector.variable_name - elif self._variable: - return self._variable.name - else: - return "UNKNOWN" - - @property - def is_valid(self) -> bool: - """Consistency check. Returns True if endpoint is defined and valid. - - Returns - ------- - bool - True if valid. Otherwise False. - """ - return bool(self.component and (self.connector or self.variable)) - - -class Connection: - """A connection is the primary artefact to connect outputs and inputs of componoents in a system. - - A connection connects an output connector of one component with an input connector of another component. 
- """ - - def __init__( - self, - name: str, - source_endpoint: Endpoint, - target_endpoint: Endpoint, - ): - self.name: str = name - self.source_endpoint: Endpoint = source_endpoint - self.target_endpoint: Endpoint = target_endpoint - - @property - def is_variable_connection(self) -> bool: - """Returns True if connection is a single variable connection. - - Returns - ------- - bool - True if single variable connection. Otherwise False. - """ - if not self.source_endpoint: - return False - if self.source_endpoint.variable: - return True - if self.source_endpoint.connector: - return self.source_endpoint.connector.is_single_connector - return False - - @property - def is_variable_group_connection(self) -> bool: - """Returns True if connection is a variable group connection. - - Returns - ------- - bool - True if variable group connection. Otherwise False. - """ - if not self.source_endpoint: - return False - if self.source_endpoint.variable: - return False - if self.source_endpoint.connector: - return self.source_endpoint.connector.is_group_connector - return False - - @property - def is_valid(self) -> bool: - """Consistency check. Returns True if connection is found fully defined and valid. - - Returns - ------- - bool - True if valid. Otherwise False. 
- """ - if not (self.source_endpoint.is_valid and self.target_endpoint.is_valid): - return False - if self.source_endpoint.variable and self.target_endpoint.variable: - return True - if self.source_endpoint.connector and self.target_endpoint.connector: - _both_are_single: bool = ( - self.source_endpoint.connector.is_single_connector - and self.target_endpoint.connector.is_single_connector - ) - _both_are_group: bool = ( - self.source_endpoint.connector.is_group_connector and self.target_endpoint.connector.is_group_connector - ) - return _both_are_single or _both_are_group - elif self.source_endpoint.connector: - return self.source_endpoint.connector.is_single_connector - elif self.target_endpoint.connector: - return self.target_endpoint.connector.is_single_connector - return False +import logging + +from ospx.component import Component +from ospx.connector import Connector +from ospx.fmi.variable import ScalarVariable + +__ALL__ = ["Endpoint", "Connection"] + +logger = logging.getLogger(__name__) + + +class Endpoint: + """Endpoints relate each side of a connection to distinct variables or connectors. + + An endpoint is for a connection what a connector is for a component. + A connection has two endpoints, one for each side. + Connection endpoints are hence the 'counterparts' to component's connectors. + """ + + def __init__( + self, + component: Component, + connector: Connector | None = None, + variable: ScalarVariable | None = None, + ) -> None: + self.component: Component = component + self._connector: Connector | None = None + self._variable: ScalarVariable | None = None + if connector: + self.connector = connector + if variable: + self.variable = variable + + @property + def connector(self) -> Connector | None: + """Returns the connector this endpoint refers to, if defined. + + Returns + ------- + Union[Connector, None] + the connector, if defined. Otherwise None. 
+ """ + return self._connector + + @connector.setter + def connector(self, connector: Connector) -> None: + """Set the connector this endpoint shall refer to. + + Parameters + ---------- + connector : Connector + the connector + """ + if self._variable: + msg = ( + f"Inconsistency: Connection endpoint defines both connector and variable.\n" + f"connector: {connector.name}\nvariable: {self._variable.name}\n" + "connector is used. variable is omitted." + ) + logger.warning(msg) + self._variable = None + self._connector = connector + + @property + def variable(self) -> ScalarVariable | None: + """Returns the scalar variable this endpoint refers to, if defined. + + Returns + ------- + Union[ScalarVariable, None] + the scalar variable, if defined. Otherwise None. + """ + return self._variable + + @variable.setter + def variable(self, variable: ScalarVariable) -> None: + """Set the scalar variable this endpoint shall refer to. + + Parameters + ---------- + variable : ScalarVariable + the scalar variable + """ + if self._connector: + msg = ( + f"Inconsistency: Connection endpoint defines both connector and variable.\n" + f"connector: {self._connector.name}\nvariable: {variable.name}\n" + "connector is omitted. variable is used." + ) + logger.warning(msg) + self._connector = None + self._variable = variable + + @property + def variable_name(self) -> str: + """Returns the name of the scalar variable this endpoint refers to. + + Returns + ------- + str + the name of the scalar variable. + """ + # sourcery skip: assign-if-exp, reintroduce-else + if self._connector: + return self._connector.variable_name + if self._variable: + return self._variable.name + return "UNKNOWN" + + @property + def is_valid(self) -> bool: + """Consistency check. Returns True if endpoint is defined and valid. + + Returns + ------- + bool + True if valid. Otherwise False. 
+ """ + return bool(self.component and (self.connector or self.variable)) + + +class Connection: + """A connection is the primary artefact to connect outputs and inputs of componoents in a system. + + A connection connects an output connector of one component with an input connector of another component. + """ + + def __init__( + self, + name: str, + source_endpoint: Endpoint, + target_endpoint: Endpoint, + ) -> None: + self.name: str = name + self.source_endpoint: Endpoint = source_endpoint + self.target_endpoint: Endpoint = target_endpoint + + @property + def is_variable_connection(self) -> bool: + """Returns True if connection is a single variable connection. + + Returns + ------- + bool + True if single variable connection. Otherwise False. + """ + if not self.source_endpoint: + return False + if self.source_endpoint.variable: + return True + if self.source_endpoint.connector: + return self.source_endpoint.connector.is_single_connector + return False + + @property + def is_variable_group_connection(self) -> bool: + """Returns True if connection is a variable group connection. + + Returns + ------- + bool + True if variable group connection. Otherwise False. + """ + if not self.source_endpoint: + return False + if self.source_endpoint.variable: + return False + if self.source_endpoint.connector: + return self.source_endpoint.connector.is_group_connector + return False + + @property + def is_valid(self) -> bool: + """Consistency check. Returns True if connection is found fully defined and valid. + + Returns + ------- + bool + True if valid. Otherwise False. 
+ """ + if not (self.source_endpoint.is_valid and self.target_endpoint.is_valid): + return False + if self.source_endpoint.variable and self.target_endpoint.variable: + return True + if self.source_endpoint.connector: + if self.target_endpoint.connector: + _both_are_single: bool = ( + self.source_endpoint.connector.is_single_connector + and self.target_endpoint.connector.is_single_connector + ) + _both_are_group: bool = ( + self.source_endpoint.connector.is_group_connector + and self.target_endpoint.connector.is_group_connector + ) + return _both_are_single or _both_are_group + return self.source_endpoint.connector.is_single_connector + if self.target_endpoint.connector: + return self.target_endpoint.connector.is_single_connector + return False diff --git a/src/ospx/connector.py b/src/ospx/connector.py index 8553554b..6deaa5a1 100644 --- a/src/ospx/connector.py +++ b/src/ospx/connector.py @@ -1,154 +1,153 @@ -import logging -from typing import Union - -__ALL__ = ["Connector"] - -logger = logging.getLogger(__name__) - - -class Connector: - """Connectors allow to explicitely make public a components scalar variable or variable group - at the component's outer interface. - - An connector is for a component what an endpoint is for a connection. - Connectors are hence the 'counterparts' to connection's endpoints. - """ - - def __init__( - self, - name: str, - variable: Union[str, None] = None, - variable_group: Union[str, None] = None, - type: Union[str, None] = None, - ): - self.name: str = name - self._variable: Union[str, None] = None - self._variable_group: Union[str, None] = None - self._type: Union[str, None] = None - if variable: - self.variable = variable - if variable_group: - self.variable_group = variable_group - if type: - self.type = type - - @property - def variable(self) -> Union[str, None]: - """Returns the scalar variable this connector is defined for. - - Returns - ------- - Union[str, None] - the scalar variable, if connector is a group connector. 
Otherwise None. - """ - return self._variable - - @variable.setter - def variable(self, variable: str): - """Set the scalar variable this connector shall be defined for. - - Parameters - ---------- - variable : str - the scalar variable - """ - if self._variable_group: - msg = ( - f"Inconsistency: Connector {self.name} defines both variable and variableGroup.\n" - f"variable: {variable}\nvariableGroup: {self._variable_group}\n" - "variable is used. variableGroup is omitted." - ) - logger.warning(msg) - self._variable_group = None - self._variable = variable - - @property - def variable_group(self) -> Union[str, None]: - """Returns the variable group this connector is defined for. - - Returns - ------- - Union[str, None] - the variable group, if connector is a group connector. Otherwise None. - """ - return self._variable_group - - @variable_group.setter - def variable_group(self, variable_group: str): - """Set the variable group this connector shall be defined for. - - Parameters - ---------- - variable_group : str - the variable group - """ - if self._variable: - msg = ( - f"Inconsistency: Connector {self.name} defines both variable and variableGroup.\n" - f"variable: {self._variable}\nvariableGroup: {variable_group}\n" - "variable is omitted. variableGroup is used." - ) - logger.warning(msg) - self._variable = None - self._variable_group = variable_group - - @property - def type(self) -> Union[str, None]: - """Returns the type of the connector.""" - return self._type - - @type.setter - def type(self, type: str): - """Set the type of the connector. - - Valid values are: - "input" - "output" - """ - valid_types: list[str] = [ - "input", - "output", - ] - if type not in valid_types: - logger.error(f"connector {self.name}: type '{type}' is invalid.") - return - self._type = type - return - - @property - def is_single_connector(self) -> bool: - """Returns True if connector is a single variable connector. 
- - Returns - ------- - bool - True if single variable connector. Otherwise False. - """ - return bool(self._variable) - - @property - def is_group_connector(self) -> bool: - """Returns True if connector is a variable group connector. - - Returns - ------- - bool - True if variable group connector. Otherwise False. - """ - return bool(self._variable_group) - - @property - def variable_name(self) -> str: - """Returns the name of the variable or variable group this connector is defined for. - - Returns - ------- - str - name of the variable or variable group - """ - if self._variable: - return self._variable - elif self._variable_group: - return self._variable_group - else: - return "UNKNOWN" +import logging + +__ALL__ = ["Connector"] + +logger = logging.getLogger(__name__) + + +class Connector: + """Connectors allow to explicitely make public a components scalar variable or variable group + at the component's outer interface. + + An connector is for a component what an endpoint is for a connection. + Connectors are hence the 'counterparts' to connection's endpoints. + """ + + def __init__( + self, + name: str, + variable: str | None = None, + variable_group: str | None = None, + type: str | None = None, # noqa: A002 + ) -> None: + self.name: str = name + self._variable: str | None = None + self._variable_group: str | None = None + self._type: str | None = None + if variable: + self.variable = variable + if variable_group: + self.variable_group = variable_group + if type: + self.type = type + + @property + def variable(self) -> str | None: + """Returns the scalar variable this connector is defined for. + + Returns + ------- + Union[str, None] + the scalar variable, if connector is a group connector. Otherwise None. + """ + return self._variable + + @variable.setter + def variable(self, variable: str) -> None: + """Set the scalar variable this connector shall be defined for. 
+ + Parameters + ---------- + variable : str + the scalar variable + """ + if self._variable_group: + msg = ( + f"Inconsistency: Connector {self.name} defines both variable and variableGroup.\n" + f"variable: {variable}\nvariableGroup: {self._variable_group}\n" + "variable is used. variableGroup is omitted." + ) + logger.warning(msg) + self._variable_group = None + self._variable = variable + + @property + def variable_group(self) -> str | None: + """Returns the variable group this connector is defined for. + + Returns + ------- + Union[str, None] + the variable group, if connector is a group connector. Otherwise None. + """ + return self._variable_group + + @variable_group.setter + def variable_group(self, variable_group: str) -> None: + """Set the variable group this connector shall be defined for. + + Parameters + ---------- + variable_group : str + the variable group + """ + if self._variable: + msg = ( + f"Inconsistency: Connector {self.name} defines both variable and variableGroup.\n" + f"variable: {self._variable}\nvariableGroup: {variable_group}\n" + "variable is omitted. variableGroup is used." + ) + logger.warning(msg) + self._variable = None + self._variable_group = variable_group + + @property + def type(self) -> str | None: + """Returns the type of the connector.""" + return self._type + + @type.setter + def type(self, value: str) -> None: + """Set the type of the connector. + + Valid values are: + "input" + "output" + """ + valid_types: list[str] = [ + "input", + "output", + ] + if value not in valid_types: + logger.error(f"connector {self.name}: type '{value}' is invalid.") + return + self._type = value + return + + @property + def is_single_connector(self) -> bool: + """Returns True if connector is a single variable connector. + + Returns + ------- + bool + True if single variable connector. Otherwise False. 
+ """ + return bool(self._variable) + + @property + def is_group_connector(self) -> bool: + """Returns True if connector is a variable group connector. + + Returns + ------- + bool + True if variable group connector. Otherwise False. + """ + return bool(self._variable_group) + + @property + def variable_name(self) -> str: + """Returns the name of the variable or variable group this connector is defined for. + + Returns + ------- + str + name of the variable or variable group + """ + # sourcery skip: assign-if-exp, reintroduce-else + if self._variable: + return self._variable + if self._variable_group: + return self._variable_group + return "UNKNOWN" diff --git a/src/ospx/fmi/__init__.py b/src/ospx/fmi/__init__.py index 20962f95..e9a4e8f3 100644 --- a/src/ospx/fmi/__init__.py +++ b/src/ospx/fmi/__init__.py @@ -1,11 +1,11 @@ -from ospx.fmi.experiment import Experiment as Experiment # noqa: F401 -from ospx.fmi.unit import ( # noqa: F401 - Unit as Unit, - BaseUnit as BaseUnit, - DisplayUnit as DisplayUnit, -) -from ospx.fmi.variable import ( # noqa: F401 - ScalarVariable as ScalarVariable, - get_fmi_data_type as get_fmi_data_type, -) -from ospx.fmi.fmu import FMU as FMU # noqa: F401 +from ospx.fmi.experiment import Experiment +from ospx.fmi.unit import ( + Unit, + BaseUnit, + DisplayUnit, +) +from ospx.fmi.variable import ( + ScalarVariable, + get_fmi_data_type, +) +from ospx.fmi.fmu import FMU diff --git a/src/ospx/fmi/experiment.py b/src/ospx/fmi/experiment.py index e7d1fc9f..4f34f891 100644 --- a/src/ospx/fmi/experiment.py +++ b/src/ospx/fmi/experiment.py @@ -1,20 +1,19 @@ -import logging -from dataclasses import dataclass -from typing import Union - -__ALL__ = ["Experiment"] - -logger = logging.getLogger(__name__) - - -@dataclass() -class Experiment: - """Data class for the DefaultExperiment element inside fmi 2.0 ModelDescription. 
- - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ModelDescription.xsd - """ - - start_time: Union[float, None] = None - stop_time: Union[float, None] = None - tolerance: Union[float, None] = None - step_size: Union[float, None] = None +import logging +from dataclasses import dataclass + +__ALL__ = ["Experiment"] + +logger = logging.getLogger(__name__) + + +@dataclass() +class Experiment: + """Data class for the DefaultExperiment element inside fmi 2.0 ModelDescription. + + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ModelDescription.xsd + """ + + start_time: float | None = None + stop_time: float | None = None + tolerance: float | None = None + step_size: float | None = None diff --git a/src/ospx/fmi/fmu.py b/src/ospx/fmi/fmu.py index d9996455..8edec98c 100644 --- a/src/ospx/fmi/fmu.py +++ b/src/ospx/fmi/fmu.py @@ -1,371 +1,377 @@ -import logging -import os -import platform -import re -from copy import deepcopy -from datetime import date, datetime -from pathlib import Path -from shutil import copyfile -from typing import Any, Dict, Mapping, MutableMapping, Union -from zipfile import ZipFile - -from dictIO import CppDict, XmlFormatter, XmlParser -from dictIO.utils.counter import BorgCounter - -from ospx.fmi import BaseUnit, DisplayUnit, Experiment, ScalarVariable, Unit -from ospx.utils.dict import find_key, find_type_identifier_in_keys, shrink_dict -from ospx.utils.zip import ( - add_file_content_to_zip, - read_file_content_from_zip, - remove_files_from_zip, - rename_file_in_zip, -) - -__ALL__ = ["FMU"] - -logger = logging.getLogger(__name__) - - -class FMU: - """Class to read and interact with an fmi 2.0 Functional Mockup Unit (FMU). - - See also https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ModelDescription.xsd - """ - - def __init__(self, file: Union[str, os.PathLike[str]]): - # Make sure fmu_file argument is of type Path. If not, cast it to Path type. 
- file = file if isinstance(file, Path) else Path(file) - if not file.exists(): - logger.error(f"DictParser: File {file} not found.") - raise FileNotFoundError(file) - - self.file: Path = file - self.model_description: CppDict = self._read_model_description() - self.counter = BorgCounter() - - def _read_model_description(self) -> CppDict: - model_description = CppDict(Path("modelDescription.xml")) - xml_parser = XmlParser() - - logger.info(f"{self.file.name}: read modelDescription.xml") - - if file_content := read_file_content_from_zip(self.file, "modelDescription.xml"): - model_description = xml_parser.parse_string(file_content, model_description) - - self._clean_solver_internal_variables(model_description) - - self.model_description = model_description - - return model_description - - def _write_model_description( - self, - model_description: Union[CppDict, None] = None, - write_inside_fmu: bool = False, - ): - """Save updated model_description both inside FMU as well as separate file in the FMUs directory.""" - if model_description: - self.model_description = model_description - - self.model_description["_xmlOpts"]["_nameSpaces"] = { - "xs": "file:///C:/Software/OSP/xsd/fmi3ModelDescription.xsd" - } - - formatter = XmlFormatter() - formatted_xml = formatter.to_string(self.model_description) - - logger.info(f"{self.file.name}: write modelDescription.xml") - - # Write internal modelDescription.xml (inside FMU) - if write_inside_fmu: - _ = remove_files_from_zip(self.file, "modelDescription.xml") - _ = add_file_content_to_zip(self.file, "modelDescription.xml", formatted_xml) - - # Write external modelDescription.xml (separate file, beside FMU) - external_file = self.file.parent.absolute() / f"{self.file.stem}_ModelDescription.xml" - with open(external_file, "w") as f: - _ = f.write(formatted_xml) - - return - - @property - def units(self) -> Dict[str, Unit]: - """Returns a dict with all units defined in the FMU. 
- - Returns - ------- - Dict[str, Unit] - dict with all units - """ - model_unit_definitions: MutableMapping[Any, Any] = {} - if unit_definitions_key := find_key(self.model_description, "UnitDefinitions$"): - model_unit_definitions = self.model_description[unit_definitions_key] - # make sure unit definitions are unique (e.g. to keep XML files clean) - model_unit_definitions = shrink_dict(model_unit_definitions, unique_key=["_attributes", "name"]) - unit_definitions: dict[str, Unit] = {} - for u in model_unit_definitions.values(): - unit = Unit() - unit.name = u["_attributes"]["name"] - # BaseUnit - if base_unit_key := find_key(u, "BaseUnit$"): - unit.base_unit = BaseUnit() - if "kg" in u[base_unit_key]["_attributes"]: - unit.base_unit.kg = u[base_unit_key]["_attributes"]["kg"] - if "m" in u[base_unit_key]["_attributes"]: - unit.base_unit.m = u[base_unit_key]["_attributes"]["m"] - if "s" in u[base_unit_key]["_attributes"]: - unit.base_unit.s = u[base_unit_key]["_attributes"]["s"] - if "A" in u[base_unit_key]["_attributes"]: - unit.base_unit.A = u[base_unit_key]["_attributes"]["A"] - if "K" in u[base_unit_key]["_attributes"]: - unit.base_unit.K = u[base_unit_key]["_attributes"]["K"] - if "mol" in u[base_unit_key]["_attributes"]: - unit.base_unit.mol = u[base_unit_key]["_attributes"]["mol"] - if "cd" in u[base_unit_key]["_attributes"]: - unit.base_unit.cd = u[base_unit_key]["_attributes"]["cd"] - if "rad" in u[base_unit_key]["_attributes"]: - unit.base_unit.rad = u[base_unit_key]["_attributes"]["rad"] - if "factor" in u[base_unit_key]["_attributes"]: - unit.base_unit.factor = u[base_unit_key]["_attributes"]["factor"] - if "offset" in u[base_unit_key]["_attributes"]: - unit.base_unit.offset = u[base_unit_key]["_attributes"]["offset"] - # DisplayUnit - if display_unit_key := find_key(u, "DisplayUnit$"): - unit.display_unit = DisplayUnit() - if "name" in u[display_unit_key]["_attributes"]: - unit.display_unit.name = u[display_unit_key]["_attributes"]["name"] - if 
"factor" in u[display_unit_key]["_attributes"]: - unit.display_unit.factor = u[display_unit_key]["_attributes"]["factor"] - if "offset" in u[display_unit_key]["_attributes"]: - unit.display_unit.offset = u[display_unit_key]["_attributes"]["offset"] - unit_definitions[unit.name] = unit - return unit_definitions - - @property - def variables(self) -> dict[str, ScalarVariable]: - """Returns a dict with all scalar variables defined in the FMU. - - Returns - ------- - dict[str, ScalarVariable] - dict with all scalar variables - """ - model_variables_key = find_key(self.model_description, "ModelVariables$") - if not model_variables_key: - return {} - # Read model variables from model description - # (without "_" and "settings" as these are proprietary variables from iti) - model_variables = { - k: v - for k, v in self.model_description[model_variables_key].items() - if not re.match("^(_|settings)", v["_attributes"]["name"]) - } - # Translate variable attributes from model description into Variable objects - variables: dict[str, ScalarVariable] = {} - for k, v in model_variables.items(): - variable_type: str = re.sub(r"^\d{6}_", "", k) - if variable_type == "ScalarVariable": - variable = ScalarVariable(name=v["_attributes"]["name"]) - if "valueReference" in v["_attributes"]: - variable.value_reference = v["_attributes"]["valueReference"] - if "description" in v["_attributes"]: - variable.description = v["_attributes"]["description"] - if "causality" in v["_attributes"]: - variable.causality = v["_attributes"]["causality"] - if "variability" in v["_attributes"]: - variable.variability = v["_attributes"]["variability"] - if type_identifier := find_type_identifier_in_keys(v): - variable.data_type = type_identifier - type_key = find_key(v, f"{type_identifier}$") - if v[type_key] and "_attributes" in v[type_key]: - if "quantity" in v[type_key]["_attributes"]: - variable.quantity = v[type_key]["_attributes"]["quantity"] - if "unit" in v[type_key]["_attributes"]: - variable.unit 
= v[type_key]["_attributes"]["unit"] - if "display_unit" in v[type_key]["_attributes"]: - variable.display_unit = v[type_key]["_attributes"]["display_unit"] - if "start" in v[type_key]["_attributes"]: - variable.start = v[type_key]["_attributes"]["start"] - variables[variable.name] = variable - - return variables - - @property - def default_experiment(self) -> Union[Experiment, None]: - """Returns the default experiment, if defined in the FMU. - - Returns - ------- - Union[Experiment, None] - the default experiment, if defined. Otherwise None. - """ - default_experiment_key = find_key(self.model_description, "DefaultExperiment$") - if not default_experiment_key: - return None - default_experiment = Experiment() - default_experiment_properties = self.model_description[default_experiment_key] - if "_attributes" in default_experiment_properties: - default_experiment_attributes = default_experiment_properties["_attributes"] - if "startTime" in default_experiment_attributes: - default_experiment.start_time = default_experiment_attributes["startTime"] - if "stopTime" in default_experiment_attributes: - default_experiment.stop_time = default_experiment_attributes["stopTime"] - if "tolerance" in default_experiment_attributes: - default_experiment.tolerance = default_experiment_attributes["tolerance"] - if "stepSize" in default_experiment_attributes: - default_experiment.step_size = default_experiment_attributes["stepSize"] - return default_experiment - - def copy(self, new_name: str): - """Save a copy of the FMU with a new name. - - Parameters - ---------- - new_name : str - Intended name of the copy. The new name must be different from the existing name. - - Returns - ------- - FMU - The new FMU - """ - # Prepare - new_name = Path(new_name).stem - existing_file_name = self.file.stem - if new_name == existing_file_name: - logger.error(f"{self.file.name} copy: new name {new_name} is identical with existing name. 
copy() aborted.") - new_model_description: CppDict = deepcopy(self.model_description) - new_file = self.file.parent.absolute() / f"{new_name}.fmu" - - # Copy FMU - _ = copyfile(self.file, new_file) - - # Rename *.dll files in FMU to match new fmu name - with ZipFile(new_file, "r") as document: - dll_file_names = [ - file.filename - for file in document.infolist() - if re.search(r".*\.dll$", file.filename) and existing_file_name in file.filename - ] - new_dll_file_names = [re.sub(existing_file_name, new_name, dll_file_name) for dll_file_name in dll_file_names] - for dll_file_name, new_dll_file_name in zip(dll_file_names, new_dll_file_names): - logger.info(f"{self.file.name} copy: renaming dll {dll_file_name} to {new_dll_file_name}") - _ = rename_file_in_zip(new_file, dll_file_name, new_dll_file_name) - - # Rename in modelDescription.xml - new_model_description["_xmlOpts"]["_rootAttributes"]["modelName"] = new_name - - # Rename in modelDescription.xml - # (STC requires consistency between and ) - co_simulation: MutableMapping[Any, Any] = new_model_description[ - find_key(new_model_description, "CoSimulation$") - ] - co_simulation["_attributes"]["modelIdentifier"] = new_name - - # Log the update in modelDescription.xml - self._log_update_in_model_description(new_model_description) - - # Write updated modelDescription.xml into new FMU - new_fmu = FMU(new_file) - new_fmu._write_model_description(new_model_description) - - return new_fmu - - def proxify(self, host: str, port: int): - """Create a proxy version of the FMU. 
- - For details see https://github.com/NTNU-IHB/FMU-proxy - - Parameters - ---------- - host : str - Remote host - port : int - Remote port - - Returns - ------- - FMU - The created proxy version of the FMU - """ - - import subprocess - - remote_string = f"--remote={host}:{port}" - command = f"fmu-proxify {self.file.name} {remote_string}" - try: - _ = subprocess.run(command, timeout=60) - except subprocess.TimeoutExpired: - logger.exception(f"Timeout occured when calling {command}.") - return self - proxy_fmu_file = self.file.parent.absolute() / f"{self.file.stem}-proxy.fmu" - return FMU(proxy_fmu_file) - - def _modify_start_values(self, variables_with_start_values: dict[str, ScalarVariable]): - """Modify the start values of variables inside the FMUs modelDescription.xml.""" - - logger.info(f"{self.file.name}: update start values of variables in modelDescription.xml") # 2 - - model_variables: MutableMapping[Any, Any] = self.model_description[ - find_key(self.model_description, "ModelVariables$") - ] - - names_of_variables_with_start_values: list[str] = [ - variable.name for _, variable in variables_with_start_values.items() - ] - - for model_variable_key, model_variable_properties in model_variables.items(): - model_variable_name: str = model_variable_properties["_attributes"]["name"] - - if model_variable_name in names_of_variables_with_start_values: - variable_with_start_values = variables_with_start_values[model_variable_name] - type_identifier = find_type_identifier_in_keys(model_variable_properties) - type_key = find_key(model_variable_properties, f"{type_identifier}$") - - logger.info( - f"{self.file.name}: update start values for variable {model_variable_name}:\n" - f"\tstart:\t\t{variable_with_start_values.start}\n" - f"\tcausality:\t {variable_with_start_values.causality}\n" - f"\tvariability:\t{variable_with_start_values.variability}" - ) - - model_variables[model_variable_key][type_key]["_attributes"]["start"] = variable_with_start_values.start - 
model_variables[model_variable_key]["_attributes"]["causality"] = variable_with_start_values.causality - model_variables[model_variable_key]["_attributes"]["variability"] = ( - variable_with_start_values.variability - ) - - self._log_update_in_model_description() - - def _log_update_in_model_description(self, model_description: Union[CppDict, None] = None): - model_description = model_description or self.model_description - - logger.info(f"{self.file.name}: update ") # 2 - # Author - old_author = model_description["_xmlOpts"]["_rootAttributes"]["author"] - if platform.system() == "Linux": - new_author = os.environ["USER"] - else: - new_author = os.environ["USERNAME"] - model_description["_xmlOpts"]["_rootAttributes"]["author"] = new_author - # DateAndTime - old_date = model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] - new_date = str(datetime.now()) - model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] = new_date - # Log modifications in attribute - add_description_string = ( - f"\nmodified {date.today()}:\n" - f"\tauthor {old_author} to {new_author}\n" - f"\tgenerationDateAndTime {old_date} to {new_date}\n" - ) - model_description["_xmlOpts"]["_rootAttributes"]["description"] += add_description_string - - # @TODO: Check when and where this method needs to be called. And why.. 
- # CLAROS, 2022-05-24 - def _clean_solver_internal_variables(self, model_description: MutableMapping[Any, Any]): - """Clean solver internal variables, such as '_iti_...'.""" - model_variables: Mapping[Any, Any] = model_description[find_key(model_description, "ModelVariables$")] - model_name = model_description["_xmlOpts"]["_rootAttributes"]["modelName"] - for model_variable_key in model_variables: - if "_origin" in model_variables[model_variable_key]: - model_variables[model_variable_key]["_origin"] = model_name +from __future__ import annotations + +import logging +import os +import platform +import re +from copy import deepcopy +from datetime import UTC, datetime +from pathlib import Path +from shutil import copyfile +from typing import TYPE_CHECKING, Any +from zipfile import ZipFile + +from dictIO import CppDict, XmlFormatter, XmlParser +from dictIO.utils.counter import BorgCounter + +from ospx.fmi import BaseUnit, DisplayUnit, Experiment, ScalarVariable, Unit +from ospx.utils.dict import find_key, find_type_identifier_in_keys, shrink_dict +from ospx.utils.zip import ( + add_file_content_to_zip, + read_file_content_from_zip, + remove_files_from_zip, + rename_file_in_zip, +) + +if TYPE_CHECKING: + from collections.abc import Mapping, MutableMapping + +__ALL__ = ["FMU"] + +logger = logging.getLogger(__name__) + + +class FMU: + """Class to read and interact with an fmi 2.0 Functional Mockup Unit (FMU). + + See also https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ModelDescription.xsd + """ + + def __init__(self, file: str | os.PathLike[str]) -> None: + # Make sure fmu_file argument is of type Path. If not, cast it to Path type. 
+ file = file if isinstance(file, Path) else Path(file) + if not file.exists(): + logger.error(f"DictParser: File {file} not found.") + raise FileNotFoundError(file) + + self.file: Path = file + self.model_description: CppDict = self._read_model_description() + self.counter = BorgCounter() + + def _read_model_description(self) -> CppDict: + model_description = CppDict(Path("modelDescription.xml")) + xml_parser = XmlParser() + + logger.info(f"{self.file.name}: read modelDescription.xml") + + if file_content := read_file_content_from_zip(self.file, "modelDescription.xml"): + model_description = xml_parser.parse_string(file_content, model_description) + + self._clean_solver_internal_variables(model_description) + + self.model_description = model_description + + return model_description + + def _write_model_description( + self, + model_description: CppDict | None = None, + *, + write_inside_fmu: bool = False, + ) -> None: + """Save updated model_description both inside FMU as well as separate file in the FMUs directory.""" + if model_description: + self.model_description = model_description + + self.model_description["_xmlOpts"]["_nameSpaces"] = { + "xs": "file:///C:/Software/OSP/xsd/fmi3ModelDescription.xsd" + } + + formatter = XmlFormatter() + formatted_xml = formatter.to_string(self.model_description) + + logger.info(f"{self.file.name}: write modelDescription.xml") + + # Write internal modelDescription.xml (inside FMU) + if write_inside_fmu: + _ = remove_files_from_zip(self.file, "modelDescription.xml") + _ = add_file_content_to_zip(self.file, "modelDescription.xml", formatted_xml) + + # Write external modelDescription.xml (separate file, beside FMU) + external_file = self.file.parent.absolute() / f"{self.file.stem}_ModelDescription.xml" + with Path.open(external_file, "w") as f: + _ = f.write(formatted_xml) + + return + + @property + def units(self) -> dict[str, Unit]: + """Returns a dict with all units defined in the FMU. 
+ + Returns + ------- + Dict[str, Unit] + dict with all units + """ + model_unit_definitions: MutableMapping[Any, Any] = {} + if unit_definitions_key := find_key(self.model_description, "UnitDefinitions$"): + model_unit_definitions = self.model_description[unit_definitions_key] + # make sure unit definitions are unique (e.g. to keep XML files clean) + model_unit_definitions = shrink_dict(model_unit_definitions, unique_key=["_attributes", "name"]) + unit_definitions: dict[str, Unit] = {} + for u in model_unit_definitions.values(): + unit = Unit() + unit.name = u["_attributes"]["name"] + # BaseUnit + if base_unit_key := find_key(u, "BaseUnit$"): + unit.base_unit = BaseUnit() + if "kg" in u[base_unit_key]["_attributes"]: + unit.base_unit.kg = u[base_unit_key]["_attributes"]["kg"] + if "m" in u[base_unit_key]["_attributes"]: + unit.base_unit.m = u[base_unit_key]["_attributes"]["m"] + if "s" in u[base_unit_key]["_attributes"]: + unit.base_unit.s = u[base_unit_key]["_attributes"]["s"] + if "A" in u[base_unit_key]["_attributes"]: + unit.base_unit.A = u[base_unit_key]["_attributes"]["A"] + if "K" in u[base_unit_key]["_attributes"]: + unit.base_unit.K = u[base_unit_key]["_attributes"]["K"] + if "mol" in u[base_unit_key]["_attributes"]: + unit.base_unit.mol = u[base_unit_key]["_attributes"]["mol"] + if "cd" in u[base_unit_key]["_attributes"]: + unit.base_unit.cd = u[base_unit_key]["_attributes"]["cd"] + if "rad" in u[base_unit_key]["_attributes"]: + unit.base_unit.rad = u[base_unit_key]["_attributes"]["rad"] + if "factor" in u[base_unit_key]["_attributes"]: + unit.base_unit.factor = u[base_unit_key]["_attributes"]["factor"] + if "offset" in u[base_unit_key]["_attributes"]: + unit.base_unit.offset = u[base_unit_key]["_attributes"]["offset"] + # DisplayUnit + if display_unit_key := find_key(u, "DisplayUnit$"): + unit.display_unit = DisplayUnit() + if "name" in u[display_unit_key]["_attributes"]: + unit.display_unit.name = u[display_unit_key]["_attributes"]["name"] + if 
"factor" in u[display_unit_key]["_attributes"]: + unit.display_unit.factor = u[display_unit_key]["_attributes"]["factor"] + if "offset" in u[display_unit_key]["_attributes"]: + unit.display_unit.offset = u[display_unit_key]["_attributes"]["offset"] + unit_definitions[unit.name] = unit + return unit_definitions + + @property + def variables(self) -> dict[str, ScalarVariable]: + """Returns a dict with all scalar variables defined in the FMU. + + Returns + ------- + dict[str, ScalarVariable] + dict with all scalar variables + """ + model_variables_key = find_key(self.model_description, "ModelVariables$") + if not model_variables_key: + return {} + # Read model variables from model description + # (without "_" and "settings" as these are proprietary variables from iti) + model_variables = { + k: v + for k, v in self.model_description[model_variables_key].items() + if not re.match("^(_|settings)", v["_attributes"]["name"]) + } + # Translate variable attributes from model description into Variable objects + variables: dict[str, ScalarVariable] = {} + for k, v in model_variables.items(): + variable_type: str = re.sub(r"^\d{6}_", "", k) + if variable_type == "ScalarVariable": + variable = ScalarVariable(name=v["_attributes"]["name"]) + if "valueReference" in v["_attributes"]: + variable.value_reference = v["_attributes"]["valueReference"] + if "description" in v["_attributes"]: + variable.description = v["_attributes"]["description"] + if "causality" in v["_attributes"]: + variable.causality = v["_attributes"]["causality"] + if "variability" in v["_attributes"]: + variable.variability = v["_attributes"]["variability"] + if type_identifier := find_type_identifier_in_keys(v): + variable.data_type = type_identifier + type_key = find_key(v, f"{type_identifier}$") + if v[type_key] and "_attributes" in v[type_key]: + if "quantity" in v[type_key]["_attributes"]: + variable.quantity = v[type_key]["_attributes"]["quantity"] + if "unit" in v[type_key]["_attributes"]: + variable.unit 
= v[type_key]["_attributes"]["unit"] + if "display_unit" in v[type_key]["_attributes"]: + variable.display_unit = v[type_key]["_attributes"]["display_unit"] + if "start" in v[type_key]["_attributes"]: + variable.start = v[type_key]["_attributes"]["start"] + variables[variable.name] = variable + + return variables + + @property + def default_experiment(self) -> Experiment | None: + """Returns the default experiment, if defined in the FMU. + + Returns + ------- + Union[Experiment, None] + the default experiment, if defined. Otherwise None. + """ + default_experiment_key = find_key(self.model_description, "DefaultExperiment$") + if not default_experiment_key: + return None + default_experiment = Experiment() + default_experiment_properties = self.model_description[default_experiment_key] + if "_attributes" in default_experiment_properties: + default_experiment_attributes = default_experiment_properties["_attributes"] + if "startTime" in default_experiment_attributes: + default_experiment.start_time = default_experiment_attributes["startTime"] + if "stopTime" in default_experiment_attributes: + default_experiment.stop_time = default_experiment_attributes["stopTime"] + if "tolerance" in default_experiment_attributes: + default_experiment.tolerance = default_experiment_attributes["tolerance"] + if "stepSize" in default_experiment_attributes: + default_experiment.step_size = default_experiment_attributes["stepSize"] + return default_experiment + + def copy(self, new_name: str) -> FMU: + """Save a copy of the FMU with a new name. + + Parameters + ---------- + new_name : str + Intended name of the copy. The new name must be different from the existing name. + + Returns + ------- + FMU + The new FMU + """ + # Prepare + new_name = Path(new_name).stem + existing_file_name = self.file.stem + if new_name == existing_file_name: + logger.error(f"{self.file.name} copy: new name {new_name} is identical with existing name. 
copy() aborted.") + new_model_description: CppDict = deepcopy(self.model_description) + new_file = self.file.parent.absolute() / f"{new_name}.fmu" + + # Copy FMU + _ = copyfile(self.file, new_file) + + # Rename *.dll files in FMU to match new fmu name + with ZipFile(new_file, "r") as document: + dll_file_names = [ + file.filename + for file in document.infolist() + if re.search(r".*\.dll$", file.filename) and existing_file_name in file.filename + ] + new_dll_file_names = [re.sub(existing_file_name, new_name, dll_file_name) for dll_file_name in dll_file_names] + for dll_file_name, new_dll_file_name in zip(dll_file_names, new_dll_file_names, strict=False): + logger.info(f"{self.file.name} copy: renaming dll {dll_file_name} to {new_dll_file_name}") + _ = rename_file_in_zip(new_file, dll_file_name, new_dll_file_name) + + # Rename in modelDescription.xml + new_model_description["_xmlOpts"]["_rootAttributes"]["modelName"] = new_name + + # Rename in modelDescription.xml + # (STC requires consistency between and ) + co_simulation: MutableMapping[Any, Any] = new_model_description[ + find_key(new_model_description, "CoSimulation$") + ] + co_simulation["_attributes"]["modelIdentifier"] = new_name + + # Log the update in modelDescription.xml + self._log_update_in_model_description(new_model_description) + + # Write updated modelDescription.xml into new FMU + new_fmu = FMU(new_file) + new_fmu._write_model_description(new_model_description) # noqa: SLF001 + + return new_fmu + + def proxify(self, host: str, port: int) -> FMU: + """Create a proxy version of the FMU. 
+ + For details see https://github.com/NTNU-IHB/FMU-proxy + + Parameters + ---------- + host : str + Remote host + port : int + Remote port + + Returns + ------- + FMU + The created proxy version of the FMU + """ + import subprocess + + remote_string = f"--remote={host}:{port}" + command = f"fmu-proxify {self.file.name} {remote_string}" + try: + _ = subprocess.run(command, timeout=60, check=False) # noqa: S603 + except subprocess.TimeoutExpired: + logger.exception(f"Timeout occured when calling {command}.") + return self + proxy_fmu_file = self.file.parent.absolute() / f"{self.file.stem}-proxy.fmu" + return FMU(proxy_fmu_file) + + def _modify_start_values(self, variables_with_start_values: dict[str, ScalarVariable]) -> None: + """Modify the start values of variables inside the FMUs modelDescription.xml.""" + logger.info(f"{self.file.name}: update start values of variables in modelDescription.xml") # 2 + + model_variables: MutableMapping[Any, Any] = self.model_description[ + find_key(self.model_description, "ModelVariables$") + ] + + names_of_variables_with_start_values: list[str] = [ + variable.name for _, variable in variables_with_start_values.items() + ] + + for model_variable_key, model_variable_properties in model_variables.items(): + model_variable_name: str = model_variable_properties["_attributes"]["name"] + + if model_variable_name in names_of_variables_with_start_values: + variable_with_start_values = variables_with_start_values[model_variable_name] + type_identifier = find_type_identifier_in_keys(model_variable_properties) + type_key = find_key(model_variable_properties, f"{type_identifier}$") + + logger.info( + f"{self.file.name}: update start values for variable {model_variable_name}:\n" + f"\tstart:\t\t{variable_with_start_values.start}\n" + f"\tcausality:\t {variable_with_start_values.causality}\n" + f"\tvariability:\t{variable_with_start_values.variability}" + ) + + model_variables[model_variable_key][type_key]["_attributes"]["start"] = 
variable_with_start_values.start + model_variables[model_variable_key]["_attributes"]["causality"] = variable_with_start_values.causality + model_variables[model_variable_key]["_attributes"]["variability"] = ( + variable_with_start_values.variability + ) + + self._log_update_in_model_description() + + def _log_update_in_model_description( + self, + model_description: CppDict | None = None, + ) -> None: + model_description = model_description or self.model_description + + logger.info(f"{self.file.name}: update ") # 2 + # Author + old_author = model_description["_xmlOpts"]["_rootAttributes"]["author"] + new_author = os.environ["USER"] if platform.system() == "Linux" else os.environ["USERNAME"] + model_description["_xmlOpts"]["_rootAttributes"]["author"] = new_author + # DateAndTime + old_date = model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] + new_date = str(datetime.now(tz=UTC)) + model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] = new_date + # Log modifications in attribute + add_description_string = ( + f"\nmodified {datetime.now(tz=UTC).date()}:\n" + f"\tauthor {old_author} to {new_author}\n" + f"\tgenerationDateAndTime {old_date} to {new_date}\n" + ) + model_description["_xmlOpts"]["_rootAttributes"]["description"] += add_description_string + return + + # @TODO: Check when and where this method needs to be called. And why.. 
+ # CLAROS, 2022-05-24 + def _clean_solver_internal_variables(self, model_description: MutableMapping[Any, Any]) -> None: + """Clean solver internal variables, such as '_iti_...'.""" + model_variables: Mapping[Any, Any] = model_description[find_key(model_description, "ModelVariables$")] + model_name = model_description["_xmlOpts"]["_rootAttributes"]["modelName"] + for model_variable_key in model_variables: + if "_origin" in model_variables[model_variable_key]: + model_variables[model_variable_key]["_origin"] = model_name + return diff --git a/src/ospx/fmi/unit.py b/src/ospx/fmi/unit.py index 83bbe594..c24a7514 100644 --- a/src/ospx/fmi/unit.py +++ b/src/ospx/fmi/unit.py @@ -1,59 +1,58 @@ -import logging -from dataclasses import dataclass, field -from typing import Union - -__ALL__ = ["Unit", "BaseUnit", "DisplayUnit"] - -logger = logging.getLogger(__name__) - - -@dataclass() -class BaseUnit: - r"""fmi 2.0 BaseUnit. - - Unit definition with reference to SI base units \n - base unit value = factor * unit value + offset \n - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd - """ - - kg: int = 0 - m: int = 0 - s: int = 0 - A: int = 0 - K: int = 0 - mol: int = 0 - cd: int = 0 - rad: int = 0 - factor: float = 1.0 - offset: float = 0.0 - - -@dataclass() -class DisplayUnit: - r"""fmi 2.0 DisplayUnit. - - display unit value = factor * unit value + offset \n - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd - """ - - # Name of DisplayUnit element, e.g. , . - # Name must be unique with respect to all other DisplayUnits defined inside a Unit element - # (in contrast, multiple Unit elements may have DisplayUnits with the same name). - name: str = field(default_factory=lambda: "-") - # display_unit value = factor * unit value + offset - factor: float = 1.0 - offset: float = 0.0 - - -@dataclass() -class Unit: - """fmi 2.0 Unit Definition. 
- - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd - """ - - # Name of unit, e.g. "N.m", "Nm", "%/s". - # "name" must be unique with respect to all other unit elements inside the UnitDefinitions section. - name: str = field(default_factory=lambda: "-") - base_unit: Union[BaseUnit, None] = None - display_unit: DisplayUnit = field(default_factory=lambda: DisplayUnit()) +import logging +from dataclasses import dataclass, field + +__ALL__ = ["Unit", "BaseUnit", "DisplayUnit"] + +logger = logging.getLogger(__name__) + + +@dataclass() +class BaseUnit: + r"""fmi 2.0 BaseUnit. + + Unit definition with reference to SI base units \n + base unit value = factor * unit value + offset \n + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd + """ + + kg: int = 0 + m: int = 0 + s: int = 0 + A: int = 0 + K: int = 0 + mol: int = 0 + cd: int = 0 + rad: int = 0 + factor: float = 1.0 + offset: float = 0.0 + + +@dataclass() +class DisplayUnit: + r"""fmi 2.0 DisplayUnit. + + display unit value = factor * unit value + offset \n + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd + """ + + # Name of DisplayUnit element, e.g. , . + # Name must be unique with respect to all other DisplayUnits defined inside a Unit element + # (in contrast, multiple Unit elements may have DisplayUnits with the same name). + name: str = field(default_factory=lambda: "-") + # display_unit value = factor * unit value + offset + factor: float = 1.0 + offset: float = 0.0 + + +@dataclass() +class Unit: + """fmi 2.0 Unit Definition. + + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Unit.xsd + """ + + # Name of unit, e.g. "N.m", "Nm", "%/s". + # "name" must be unique with respect to all other unit elements inside the UnitDefinitions section. 
+ name: str = field(default_factory=lambda: "-") + base_unit: BaseUnit | None = None + display_unit: DisplayUnit = field(default_factory=lambda: DisplayUnit()) diff --git a/src/ospx/fmi/variable.py b/src/ospx/fmi/variable.py index cd49ac3f..01e300b4 100644 --- a/src/ospx/fmi/variable.py +++ b/src/ospx/fmi/variable.py @@ -1,256 +1,258 @@ -import logging -from collections.abc import Iterable -from typing import Any, List, Sequence, Union - -from dictIO import Formatter, Parser - -__ALL__ = ["ScalarVariable", "get_fmi_data_type"] - -logger = logging.getLogger(__name__) - - -class ScalarVariable: - """fmi 2.0 ScalarVariable. - - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ScalarVariable.xsd - """ - - def __init__( - self, - name: str, - data_type: Union[str, None] = None, - causality: Union[str, None] = None, - variability: Union[str, None] = None, - start: Union[int, float, bool, str, None] = None, - value_reference: int = 0, - description: Union[str, None] = None, - quantity: Union[str, None] = None, - unit: Union[str, None] = None, - display_unit: Union[str, None] = None, - ): - # Attributes - self.name: str - self._data_type: Union[str, None] = None - self._causality: str = "local" - self._variability: Union[str, None] = None - self._start: Union[int, float, bool, str, None] = None - self.value_reference: int = 0 - self.description: Union[str, None] = None - self.quantity: Union[str, None] = None - self.unit: Union[str, None] = None - self.display_unit: Union[str, None] = None - # Initialization - self.name = name - if data_type: - self.data_type = data_type - if causality: - self.causality = causality - if variability: - self.variability = variability - if start: - self.start = start - self.value_reference = value_reference - self.description = description - self.quantity = quantity - self.unit = unit - self.display_unit = display_unit - - @property - def data_type(self) -> Union[str, None]: - """Returns the FMI data type of the scalar 
Variable.""" - return self._data_type - - @data_type.setter - def data_type(self, type: str): - """Set the FMI data type of the scalar Variable. - - Valid values are: - "Real" - "Integer" - "Boolean" - "String" - "Enumeration" - """ - valid_types: list[str] = [ - "Real", - "Integer", - "Boolean", - "String", - "Enumeration", - ] - if type not in valid_types: - logger.error(f"variable {self.name}: value for data_type '{type}' is invalid.") - return - self._data_type = type - return - - @property - def causality(self) -> str: - """Returns the causality of the scalar Variable.""" - return self._causality - - @causality.setter - def causality(self, value: str): - """Set the causality of the scalar Variable. - - Valid values are: - "parameter" - "calculatedParameter" - "input" - "output" - "local" - "independent" - "structuralParameter" - """ - valid_values: list[str] = [ - "parameter", - "calculatedParameter", - "input", - "output", - "local", - "independent", - "structuralParameter", - ] - if value not in valid_values: - logger.error(f"variable {self.name}: causality value '{value}' is invalid.") - return - self._causality = value - return - - @property - def variability(self) -> Union[str, None]: - """Returns the variability of the scalar Variable.""" - return self._variability - - @variability.setter - def variability(self, value: str): - """Set the variability of the scalar Variable. 
- - Valid values are: - "constant" - "fixed" - "tunable" - "discrete" - "continuous" - """ - valid_values: list[str] = [ - "constant", - "fixed", - "tunable", - "discrete", - "continuous", - ] - if value not in valid_values: - logger.error(f"variable {self.name}: value for variability '{value}' is invalid.") - return - self._variability = value - return - - @property - def start(self) -> Union[int, float, bool, str, None]: - """Returns the start value (initial value) of the scalar Variable.""" - return self._start - - @start.setter - def start(self, value: Union[int, float, bool, str, None]): - """Set the start value (initial value) of the scalar Variable.""" - if value is None: - logger.error(f"variable {self.name}: start value shall be set to 'None', but 'None' is invalid for start.") - return - if self.data_type: - # make sure the data type of the new value does either match or gets casted to the data_type defined for the variable - new_value_data_type = get_fmi_data_type(value) - if new_value_data_type == self.data_type: - self._start = value - else: - casted_value = _cast_to_fmi_data_type(value, self.data_type) - if casted_value is not None and not isinstance(casted_value, Sequence): - self._start = casted_value - elif casted_value is None: - logger.error( - f"variable {self.name}: start shall be set to 'None', but 'None' is invalid for start." - ) - return - else: - logger.error( - f"variable {self.name}: start shall be set to {casted_value}, but fmi data type 'Enumeration' is invalid for start." - ) - return - else: - self._start = value - self.data_type = get_fmi_data_type(self.start) - - -def get_fmi_data_type(arg: Any) -> str: - r"""Return the fmi 2.0 data type corresponding to Python type of the passed in argument. 
- - See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Type.xsd - - Parameters - ---------- - arg : Any - The argument for which the fmi 2.0 data type shall be determined - - Returns - ------- - str - The fmi 2.0 data type, returned as string literal.\n - valid fmi 2.0 data types are 'Integer', 'Real', 'Boolean', 'String' and 'Enumeration' - """ - - if isinstance(arg, int): - return "Integer" - elif isinstance(arg, float): - return "Real" - elif isinstance(arg, bool): - return "Boolean" - # not regarding the content, sequence is always returned if not int or float, e.g. string. - # requires a solution, if xs:enumeration is required. - # elif isinstance(arg, Sequence): - # return 'Enumeration' - else: - return "String" - - -def _cast_to_fmi_data_type( - arg: Union[int, float, bool, str, Sequence[Any]], fmi_data_type: str -) -> Union[int, float, bool, str, List[Any], None]: - r"""Casts the passed in argument to a Python data type that matches the requested fmi data type. - - Parameters - ---------- - arg : Union[int, float, bool, str, Sequence[Any]] - The argument to be casted - fmi_data_type : str - The fmi data type the argument shall be casted to.\n - valid fmi 2.0 data types are 'Integer', 'Real', 'Boolean', 'String' and 'Enumeration' - - Returns - ------- - Union[int, float, bool, str, List[Any], None] - The casted value (in a Python data type that matches the requested fmi data type) - """ - if fmi_data_type in {"Integer", "Real", "Boolean"}: - if isinstance(arg, Sequence): - logger.warning( - f"_cast_to_fmi_data_type(): argument {arg} of type List/Tuple/Sequence cannot be casted to fmi data type {fmi_data_type}" - ) - return None - # parse if arg is string - parsed_value: Union[int, float, bool] - parsed_value = Parser().parse_type(arg) if isinstance(arg, str) else arg - # cast to int / float / bool - if fmi_data_type == "Integer": - return int(parsed_value) - elif fmi_data_type == "Real": - return float(parsed_value) - else: - return 
bool(parsed_value) - elif fmi_data_type == "String": - # format as string - return Formatter().format_dict(arg) if isinstance(arg, Sequence) else Formatter().format_type(arg) - elif fmi_data_type == "Enumeration": - # cast to list - return list(arg) if isinstance(arg, Iterable) else [arg] - else: - return None +# ruff: noqa: PYI041 +import logging +from collections.abc import Iterable, Sequence +from typing import Any + +from dictIO import Formatter, Parser + +__ALL__ = ["ScalarVariable", "get_fmi_data_type"] + +logger = logging.getLogger(__name__) + + +class ScalarVariable: + """fmi 2.0 ScalarVariable. + + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2ScalarVariable.xsd + """ + + def __init__( # noqa: PLR0913 + self, + name: str, + data_type: str | None = None, + causality: str | None = None, + variability: str | None = None, + start: int | float | bool | str | None = None, + value_reference: int = 0, + description: str | None = None, + quantity: str | None = None, + unit: str | None = None, + display_unit: str | None = None, + ) -> None: + # Attributes + self.name: str + self._data_type: str | None = None + self._causality: str = "local" + self._variability: str | None = None + self._start: int | float | bool | str | None = None + self.value_reference: int = 0 + self.description: str | None = None + self.quantity: str | None = None + self.unit: str | None = None + self.display_unit: str | None = None + # Initialization + self.name = name + if data_type: + self.data_type = data_type + if causality: + self.causality = causality + if variability: + self.variability = variability + if start: + self.start = start + self.value_reference = value_reference + self.description = description + self.quantity = quantity + self.unit = unit + self.display_unit = display_unit + + @property + def data_type(self) -> str | None: + """Returns the FMI data type of the scalar Variable.""" + return self._data_type + + @data_type.setter + def data_type(self, value: 
str) -> None: + """Set the FMI data type of the scalar Variable. + + Valid values are: + "Real" + "Integer" + "Boolean" + "String" + "Enumeration" + """ + valid_types: list[str] = [ + "Real", + "Integer", + "Boolean", + "String", + "Enumeration", + ] + if value not in valid_types: + logger.error(f"variable {self.name}: value for data_type '{value}' is invalid.") + return + self._data_type = value + return + + @property + def causality(self) -> str: + """Returns the causality of the scalar Variable.""" + return self._causality + + @causality.setter + def causality(self, value: str) -> None: + """Set the causality of the scalar Variable. + + Valid values are: + "parameter" + "calculatedParameter" + "input" + "output" + "local" + "independent" + "structuralParameter" + """ + valid_values: list[str] = [ + "parameter", + "calculatedParameter", + "input", + "output", + "local", + "independent", + "structuralParameter", + ] + if value not in valid_values: + logger.error(f"variable {self.name}: causality value '{value}' is invalid.") + return + self._causality = value + return + + @property + def variability(self) -> str | None: + """Returns the variability of the scalar Variable.""" + return self._variability + + @variability.setter + def variability(self, value: str) -> None: + """Set the variability of the scalar Variable. 
+ + Valid values are: + "constant" + "fixed" + "tunable" + "discrete" + "continuous" + """ + valid_values: list[str] = [ + "constant", + "fixed", + "tunable", + "discrete", + "continuous", + ] + if value not in valid_values: + logger.error(f"variable {self.name}: value for variability '{value}' is invalid.") + return + self._variability = value + return + + @property + def start(self) -> int | float | bool | str | None: + """Returns the start value (initial value) of the scalar Variable.""" + return self._start + + @start.setter + def start(self, value: int | float | bool | str | None) -> None: + """Set the start value (initial value) of the scalar Variable.""" + if value is None: + logger.error(f"variable {self.name}: start value shall be set to 'None', but 'None' is invalid for start.") + return + if self.data_type: + # make sure the data type of the new value does either match + # or gets casted to the data_type defined for the variable + new_value_data_type = get_fmi_data_type(value) + if new_value_data_type == self.data_type: + self._start = value + else: + casted_value = _cast_to_fmi_data_type(value, self.data_type) + if casted_value is not None and not isinstance(casted_value, Sequence): + self._start = casted_value + elif casted_value is None: + logger.error( + f"variable {self.name}: start shall be set to 'None', but 'None' is invalid for start." + ) + return + else: + logger.error( + f"variable {self.name}: start shall be set to {casted_value}, " + "but fmi data type 'Enumeration' is invalid for start." + ) + return + else: + self._start = value + self.data_type = get_fmi_data_type(self.start) + + +def get_fmi_data_type(arg: object) -> str: + r"""Return the fmi 2.0 data type corresponding to Python type of the passed in argument. 
+ + See https://github.com/modelica/fmi-standard/blob/v2.0.x/schema/fmi2Type.xsd + + Parameters + ---------- + arg : Any + The argument for which the fmi 2.0 data type shall be determined + + Returns + ------- + str + The fmi 2.0 data type, returned as string literal.\n + valid fmi 2.0 data types are 'Integer', 'Real', 'Boolean', 'String' and 'Enumeration' + """ + # sourcery skip: assign-if-exp, reintroduce-else + if isinstance(arg, int): + return "Integer" + if isinstance(arg, float): + return "Real" + if isinstance(arg, bool): + return "Boolean" + # not regarding the content, sequence is always returned if not int or float, e.g. string. + # requires a solution, if xs:enumeration is required. + # elif isinstance(arg, Sequence): # noqa: ERA001 + # return 'Enumeration' # noqa: ERA001 + return "String" + + +def _cast_to_fmi_data_type( + arg: int | float | bool | str | Sequence[Any], + fmi_data_type: str, +) -> int | float | bool | str | list[Any] | None: + r"""Casts the passed in argument to a Python data type that matches the requested fmi data type. 
+ + Parameters + ---------- + arg : Union[int, float, bool, str, Sequence[Any]] + The argument to be casted + fmi_data_type : str + The fmi data type the argument shall be casted to.\n + valid fmi 2.0 data types are 'Integer', 'Real', 'Boolean', 'String' and 'Enumeration' + + Returns + ------- + Union[int, float, bool, str, List[Any], None] + The casted value (in a Python data type that matches the requested fmi data type) + """ + if fmi_data_type in {"Integer", "Real", "Boolean"}: + if isinstance(arg, Sequence): + logger.warning( + f"_cast_to_fmi_data_type(): argument {arg} of type List/Tuple/Sequence " + f"cannot be casted to fmi data type {fmi_data_type}" + ) + return None + # parse if arg is string + parsed_value: int | float | bool + parsed_value = Parser().parse_type(arg) if isinstance(arg, str) else arg + # cast to int / float / bool + if fmi_data_type == "Integer": + return int(parsed_value) + if fmi_data_type == "Real": + return float(parsed_value) + return bool(parsed_value) + if fmi_data_type == "String": + # format as string + return Formatter().format_dict(arg) if isinstance(arg, Sequence) else Formatter().format_type(arg) + if fmi_data_type == "Enumeration": + # cast to list + return list(arg) if isinstance(arg, Iterable) else [arg] + return None diff --git a/src/ospx/graph.py b/src/ospx/graph.py index 2932c79c..4bb3942a 100644 --- a/src/ospx/graph.py +++ b/src/ospx/graph.py @@ -1,217 +1,226 @@ -# pyright: reportUnknownMemberType=false -# pyright: reportUnknownParameterType=false -# pyright: reportUnnecessaryTypeIgnoreComment=false - -import functools -import logging -import re -from typing import Any, Dict, Tuple, Union - -from graphviz import Digraph -from graphviz.dot import Dot - -from ospx import Component, Connection, OspSimulationCase - -__ALL__ = ["Graph"] - -logger = logging.getLogger(__name__) - - -class Graph: - """Class providing methods to generate a visual dependency graph - of a system's components and its connections. 
- """ - - @staticmethod - def generate_dependency_graph(case: OspSimulationCase): - """Generate a dependency graph of the system structure as pdf, for documentation. - - Note: This requires graphviz to be installed on the local machine - """ - graphiz_not_found_error_mesage: str = ( - "OspSimulationCase.generate_dependency_graph(): failed to run graphviz. \n" - "To generate the system structure dependency graph, graphviz needs to be installed on the local machine. \n" - "Kindly check your local installation of graphviz." - ) - - # Default styles - text_size: str = "11" - styles: Dict[str, Dict[str, str]] = { - "graph": { - "label": f"{case.simulation.name}", - "fontname": "Verdana", - "fontsize": text_size, - "fontcolor": "black", - "bgcolor": "white", - "rankdir": "TD", - "overlap": "compress", - "sep": "10,100", - "remincross": "true", - "ratio": "fill", - "margin": "0", - "size": "10, 10!", - }, - "nodes": { - "fontname": "Verdana", - "fontsize": text_size, - "fontcolor": "white", - "shape": "square", - "color": "magenta", - "style": "filled", - "fillcolor": "magenta", - }, - "edges": { - "style": "dashed", - "color": "magenta", - "penwidth": "3", - "arrowhead": "open", - "fontname": "Verdana", - "fontsize": text_size, - "fontcolor": "magenta", - }, - } - - basic_op_names: str = "(power|dot|sum|diff|prod|div|quotient)" - input_names: str = "^(INP|inp)" - - callgraph: Dot - try: - digraph = functools.partial(Digraph, format="png") - callgraph = digraph() - callgraph = _apply_styles(callgraph, styles) - except Exception: - logger.exception(graphiz_not_found_error_mesage) - return - - # Components - for component in case.system_structure.components.values(): - label_key, label = _get_node_label(component) - # var_keys = find_key(case.models[key]['InitialValues'], 'InitialValue') - # variables = {} - label = _create_table( - label_key, - { - "source:": component.fmu.file.name, - "stepsize:": component.step_size, - "variables:": "", - }, - ) - - if 
re.search(input_names, component.name): - shape = "diamond" - style = "filled,rounded" - fillcolor = "#FFFFFF" - elif re.search(basic_op_names, component.name, re.I): - label = label - shape = "square" - style = "filled, rounded" - fillcolor = "#EEBBDD" - else: - shape = "square" - style = "filled" - fillcolor = "#DDDDEE" - - callgraph.node( - label_key, - label=label, - fontname="Verdana", - fontsize=text_size, - fontcolor="black", - shape=shape, - color="black", - style=style, - fillcolor=fillcolor, - ) - - # Connections - - for _, connection in case.system_structure.connections.items(): - if not (connection.source_endpoint and connection.target_endpoint): - return - if not (connection.source_endpoint.component and connection.target_endpoint.component): - return - from_key: str = connection.source_endpoint.component.name - to_key: str = connection.target_endpoint.component.name - - label = _get_edge_label(connection) - - if re.search(input_names, from_key, re.I): - label = "input\n%s" % label - style = "dashed" - color = "#003399" - fontcolor = "#003399" - penwidth = ("%i" % 1,) - weight = "%i" % 1 - - elif re.search(basic_op_names, from_key, re.I): - style = "filled" - color = "#995566" - fontcolor = "#663344" - penwidth = ("%i" % 3,) - weight = ("%.2f" % 0.66,) - - else: - style = "bold" - color = "black" - fontcolor = "black" - penwidth = ("%i" % int(round((2) ** 1.5, 0)),) - weight = ("%i" % int(round((2) ** 1.5, 0)),) - - callgraph.edge( - from_key, - to_key, - style=style, - color=color, - arrowhead="open", - fontname="Verdana", - fontsize=text_size, - fontcolor=fontcolor, - penwidth=str(penwidth), - weight=str(weight), - label=label, - overlap="false", - splines="true", - ) - - # Create callGraph pdf - - try: - callgraph.render(f"{case.simulation.name}_callGraph", format="pdf") # type: ignore - except Exception: - logger.exception(graphiz_not_found_error_mesage) - - return - - -def _apply_styles(digraph: Dot, styles: Dict[str, Any]) -> Dot: - 
digraph.graph_attr.update(("graph" in styles and styles["graph"]) or {}) # type: ignore - digraph.node_attr.update(("nodes" in styles and styles["nodes"]) or {}) # type: ignore - digraph.edge_attr.update(("edges" in styles and styles["edges"]) or {}) # type: ignore - return digraph - - -def _get_node_label(component: Component) -> Tuple[str, str]: - label = f"{component.name}\n___________\n\nfmu\n" - label += re.sub(r"(^.*/|^.*\\|\.fmu.*$)", "", component.fmu.file.name) - - label_key = component.name - return label_key, label - - -def _get_edge_label(connection: Connection) -> str: - return ( - f"{connection.source_endpoint.variable_name}-->{connection.target_endpoint.variable_name}" - if connection.is_valid - else "" - ) - - -def _create_table(name: str, child: Union[Dict[str, Any], None] = None) -> str: - _child: Dict[str, Any] = child or {" ": " "} - n_child = len(_child) - string: str = f'<\n\n\n\n\n' - for key, item in _child.items(): - string += f"\n" - string += "
{name}
{key}{item}
\n>" - - return string +# pyright: reportUnknownMemberType=false +# pyright: reportUnknownParameterType=false +# pyright: reportUnnecessaryTypeIgnoreComment=false + +import functools +import logging +import re +from typing import Any + +from graphviz import Digraph +from graphviz.graphs import BaseGraph + +from ospx import Component, Connection, OspSimulationCase + +__ALL__ = ["Graph"] + +logger = logging.getLogger(__name__) + + +class Graph: + """Class providing methods to generate a visual dependency graph + of a system's components and its connections. + """ + + @staticmethod + def generate_dependency_graph(case: OspSimulationCase) -> None: + """Generate a dependency graph of the system structure as pdf, for documentation. + + Note: This requires graphviz to be installed on the local machine + """ + graphiz_not_found_error_mesage: str = ( + "OspSimulationCase.generate_dependency_graph(): failed to run graphviz. \n" + "To generate the system structure dependency graph, graphviz needs to be installed on the local machine. \n" + "Kindly check your local installation of graphviz." 
+ ) + + label: str + shape: str + style: str + color: str + fontcolor: str + fillcolor: str + penwidth: tuple[str] + weight: tuple[str] + + # Default styles + text_size: str = "11" + styles: dict[str, dict[str, str]] = { + "graph": { + "label": f"{case.simulation.name}", + "fontname": "Verdana", + "fontsize": text_size, + "fontcolor": "black", + "bgcolor": "white", + "rankdir": "TD", + "overlap": "compress", + "sep": "10,100", + "remincross": "true", + "ratio": "fill", + "margin": "0", + "size": "10, 10!", + }, + "nodes": { + "fontname": "Verdana", + "fontsize": text_size, + "fontcolor": "white", + "shape": "square", + "color": "magenta", + "style": "filled", + "fillcolor": "magenta", + }, + "edges": { + "style": "dashed", + "color": "magenta", + "penwidth": "3", + "arrowhead": "open", + "fontname": "Verdana", + "fontsize": text_size, + "fontcolor": "magenta", + }, + } + + basic_op_names: str = "(power|dot|sum|diff|prod|div|quotient)" + input_names: str = "^(INP|inp)" + + callgraph: BaseGraph + try: + digraph = functools.partial(Digraph, format="png") + callgraph = digraph() + callgraph = _apply_styles(callgraph, styles) + except Exception: + logger.exception(graphiz_not_found_error_mesage) + return + + # Components + for component in case.system_structure.components.values(): + label_key, label = _get_node_label(component) + label = _create_table( + label_key, + { + "source:": component.fmu.file.name, + "stepsize:": component.step_size, + "variables:": "", + }, + ) + + if re.search(input_names, component.name): + shape = "diamond" + style = "filled,rounded" + fillcolor = "#FFFFFF" + elif re.search(basic_op_names, component.name, re.IGNORECASE): + shape = "square" + style = "filled, rounded" + fillcolor = "#EEBBDD" + else: + shape = "square" + style = "filled" + fillcolor = "#DDDDEE" + + callgraph.node( + label_key, + label=label, + fontname="Verdana", + fontsize=text_size, + fontcolor="black", + shape=shape, + color="black", + style=style, + fillcolor=fillcolor, + 
) + + # Connections + + for connection in case.system_structure.connections.values(): + if not (connection.source_endpoint and connection.target_endpoint): + return + if not (connection.source_endpoint.component and connection.target_endpoint.component): + return + from_key: str = connection.source_endpoint.component.name + to_key: str = connection.target_endpoint.component.name + + label = _get_edge_label(connection) + + if re.search(input_names, from_key, re.IGNORECASE): + label = f"input\n{label}" + style = "dashed" + color = "#003399" + fontcolor = "#003399" + penwidth = ("%i" % 1,) + weight = ("%i" % 1,) + + elif re.search(basic_op_names, from_key, re.IGNORECASE): + style = "filled" + color = "#995566" + fontcolor = "#663344" + penwidth = ("%i" % 3,) + weight = (f"{0.66:.2f}",) + + else: + style = "bold" + color = "black" + fontcolor = "black" + penwidth = ("%i" % int(round((2) ** 1.5, 0)),) + weight = ("%i" % int(round((2) ** 1.5, 0)),) + + callgraph.edge( + from_key, + to_key, + style=style, + color=color, + arrowhead="open", + fontname="Verdana", + fontsize=text_size, + fontcolor=fontcolor, + penwidth=str(penwidth), + weight=str(weight), + label=label, + overlap="false", + splines="true", + ) + + # Create callGraph pdf + + try: + _ = callgraph.render(f"{case.simulation.name}_callGraph", format="pdf") + except Exception: + logger.exception(graphiz_not_found_error_mesage) + + return + + +def _apply_styles(digraph: BaseGraph, styles: dict[str, Any]) -> BaseGraph: + digraph.graph_attr.update(("graph" in styles and styles["graph"]) or {}) + digraph.node_attr.update(("nodes" in styles and styles["nodes"]) or {}) + digraph.edge_attr.update(("edges" in styles and styles["edges"]) or {}) + return digraph + + +def _get_node_label(component: Component) -> tuple[str, str]: + label = f"{component.name}\n___________\n\nfmu\n" + label += re.sub(r"(^.*/|^.*\\|\.fmu.*$)", "", component.fmu.file.name) + + label_key = component.name + return label_key, label + + +def 
_get_edge_label(connection: Connection) -> str: + return ( + f"{connection.source_endpoint.variable_name}-->{connection.target_endpoint.variable_name}" + if connection.is_valid + else "" + ) + + +def _create_table(name: str, child: dict[str, Any] | None = None) -> str: + _child: dict[str, Any] = child or {" ": " "} + n_child = len(_child) + string: str = ( + f'<\n\n\n' + f'\n\n' + ) + for key, item in _child.items(): + string += f"\n" + string += "
{name}
{key}{item}
\n>" + + return string diff --git a/src/ospx/importer.py b/src/ospx/importer.py index 8f0da121..50e5f020 100644 --- a/src/ospx/importer.py +++ b/src/ospx/importer.py @@ -1,297 +1,304 @@ -import logging -import os -import re -from pathlib import Path -from typing import Any, Dict, List, Union - -from dictIO import DictReader, DictWriter -from dictIO.utils.counter import BorgCounter -from dictIO.utils.path import highest_common_root_folder, relative_path - -from ospx.utils.dict import find_key, find_keys, find_type_identifier_in_keys - -__ALL__ = ["OspSystemStructureImporter"] - -logger = logging.getLogger(__name__) - - -class OspSystemStructureImporter: - """Class providing methods to convert an existing - OspSystemStructure.xml file to an ospx caseDict file. - """ - - @staticmethod - def import_system_structure( - system_structure_file: Union[str, os.PathLike[str]], - enter_lib_source_as_relative_path: bool = False, - ): - """Import an OspSystemStructure.xml file and save it as an ospx caseDict file. - - Parameters - ---------- - system_structure_file : Union[str, os.PathLike[str]] - the OspSystemStructure.xml file to be imported - enter_lib_source_as_relative_path : bool, optional - whether lib_source shall be entered as relative path in the caseDict, by default False - - Raises - ------ - FileNotFoundError - if system_structure_file does not exist - - NotImplementedError - if the OspSystemStructure contains connections of OSP-IS type 'SignalConnection' or 'SignalGroupConnection'. - These connection types are not implemented yet in ospx. - - NotImplementedError - if the OspSystemStructure contains connections with OSP-IS endpoint type 'Signal' or 'SignalGroup'. - These endpoint types are not implemented yet in ospx. - - TypeError - if the OspSystemStructure contains connections of an unknown type. - - TypeError - if the OspSystemStructure contains connections with an unknown endpoint type. - """ - - # Make sure source_file argument is of type Path. 
If not, cast it to Path type. - system_structure_file = ( - system_structure_file if isinstance(system_structure_file, Path) else Path(system_structure_file) - ) - - # Check whether system structure file exists - if not system_structure_file.exists(): - logger.error(f"OspSystemStructureImporter: File {system_structure_file} not found.") - raise FileNotFoundError(system_structure_file) - - if system_structure_file.suffix != ".xml": - logger.error(f"OspSystemStructureImporter: File type {system_structure_file} not implemented yet.") - return - - counter = BorgCounter() - - source_dict = DictReader.read(system_structure_file, comments=False) - source_folder: Path = system_structure_file.resolve().parent.absolute() - lib_source_folder: Path = source_folder # setting to source_folder acts as fallback / default - target_folder: Path = Path.cwd().absolute() - - # Main subdicts contained in systemStructure - connections: Dict[str, Dict[Any, Any]] = {} - components: Dict[str, Dict[Any, Any]] = {} - - # Connections - # iterate over the connections first as they contain the variable and component names - temp_connectors = {} - if connections_key := find_key(source_dict, "Connections$"): - for connection_type, connection_properties in source_dict[connections_key].items(): - connection_type: str = re.sub(r"(^\d{1,6}_)", "", connection_type) - - if connection_type not in { - "VariableConnection", - "VariableGroupConnection", - }: - if connection_type in {"SignalConnection", "SignalGroupConnection"}: - msg: str = ( - f"Import failed: {system_structure_file.name} contains a connection with OSP-IS connection type '{connection_type}'\n" - f"The support for connection type '{connection_type}' is not yet implemented in ospx." 
- ) - logger.error(msg) - raise NotImplementedError(msg) - else: - msg: str = f"Import failed: {system_structure_file.name} contains a connection with unknown connection type '{connection_type}'\n" - logger.error(msg) - raise TypeError(msg) - - connection: Dict[str, Dict[Any, Any]] = {} - connection_name: str = "" - # following loop has range {0,1} - for index, (endpoint_type, endpoint_properties) in enumerate(connection_properties.items()): - endpoint_type: str = re.sub(r"(^\d{1,6}_)", "", endpoint_type) - - if endpoint_type not in {"Variable", "VariableGroup"}: - if endpoint_type in {"Signal", "SignalGroup"}: - msg: str = ( - f"Import failed: {system_structure_file.name} contains a connection with OSP-IS endpoint type '{endpoint_type}'\n" - f"The support for endpoint type '{endpoint_type}' is not yet implemented in ospx." - ) - logger.error(msg) - raise NotImplementedError(msg) - else: - msg: str = f"Import failed: {system_structure_file.name} contains a connection with unknown endpoint type '{endpoint_type}'\n" - logger.error(msg) - raise TypeError(msg) - - component_name: str = endpoint_properties["_attributes"]["simulator"] - referenced_name: str = endpoint_properties["_attributes"]["name"] - # alternator for source <--> target (because there are always 2 entries in VariableConnection in always the same sequence) - endpoint_name: str = "source" if index % 2 == 0 else "target" - endpoint: dict[str, str] = {} - _connector_type: str = "output" if endpoint_name == "source" else "input" - _connector: dict[str, str] = {} - _connector_name: str = f"{component_name}_{referenced_name}" - if endpoint_type == "Variable": - endpoint = { - "component": component_name, - "variable": referenced_name, - } - _connector = { - "variable": referenced_name, - "type": _connector_type, - } - elif endpoint_type == "VariableGroup": - endpoint = { - "component": component_name, - "connector": _connector_name, - } - _connector = { - "variableGroup": referenced_name, - "type": 
_connector_type, - } - connection[endpoint_name] = endpoint - if not connection_name: - connection_name = component_name - else: - connection_name += f"_to_{component_name}" - - # Save _connector in temp_connectors dict. - # (The variable and component information stored in these connectors - # is later used to complete component properties) - temp_connectors[f"{counter():06d}_{component_name}"] = {_connector_name: _connector} - # Save in connections dict - if connection_name in connections: - suffix_number: int = 2 - _connection_name: str = f"{connection_name}_{suffix_number:02d}" - while _connection_name in connections: - suffix_number += 1 - _connection_name = f"{connection_name}_{suffix_number:02d}" - connection_name = _connection_name - connections[connection_name] = connection - # Simulators (=Components) - if simulators_key := find_key(source_dict, "Simulators$"): - # Determine the highest common root folder among all FMU's. - # This will be used as libSource folder. - fmu_folders: List[Path] = [] - for simulator_properties in source_dict[simulators_key].values(): - fmu_name: str = simulator_properties["_attributes"]["source"] - fmu_file: Path = Path(fmu_name) - fmu_folder: Path - if fmu_file.is_absolute(): - fmu_folder = fmu_file.resolve().parent.absolute() - else: - fmu_folder = (source_folder / fmu_file).resolve().parent.absolute() - fmu_folders.append(fmu_folder) - if fmu_folders: - lib_source_folder = highest_common_root_folder(fmu_folders) - - for simulator_properties in source_dict[simulators_key].values(): - # Component - component_name = simulator_properties["_attributes"]["name"] - # Connectors - component_connectors: Dict[str, Dict[Any, Any]] = {} - for temp_connector_key, connector in temp_connectors.items(): - if component_name in temp_connector_key: - component_connectors |= connector - # FMU - fmu_name: str = simulator_properties["_attributes"]["source"] - fmu_file: Path = Path(fmu_name) - if fmu_file.is_absolute(): - fmu_file = 
fmu_file.resolve() - else: - fmu_file = (source_folder / fmu_file).resolve() - fmu_file_relative_to_lib_source: Path = relative_path(lib_source_folder, fmu_file) - # Step Size - step_size: Union[float, None] = None - if "stepSize" in simulator_properties["_attributes"]: - step_size = float(simulator_properties["_attributes"]["stepSize"]) - # Initial values - component_initial_values: Dict[str, Dict[Any, Any]] = {} - if initial_values_key := find_key(simulator_properties, "InitialValues$"): - initial_values = simulator_properties[initial_values_key] - if initial_value_keys := find_keys(initial_values, "InitialValue$"): - for initial_value_key in initial_value_keys: - initial_value = initial_values[initial_value_key] - if data_type := find_type_identifier_in_keys(initial_value): - type_key = find_key(initial_value, f"{data_type}$") - if not type_key: - continue - _type: str = re.sub(r"(^\d{1,6}_)", "", type_key) - referenced_name = initial_value["_attributes"]["variable"] - value: Union[float, int, bool, str] - if _type == "Boolean": - value = bool(initial_value[type_key]["_attributes"]["value"]) - elif _type == "Integer": - value = int(initial_value[type_key]["_attributes"]["value"]) - elif _type == "Real": - value = float(initial_value[type_key]["_attributes"]["value"]) - else: - value = initial_value[type_key]["_attributes"]["value"] - component_initial_values |= {referenced_name: {"start": value}} - # Assemble component - component: Dict[str, Union[Dict[Any, Any], str, float, Path]] = { - "connectors": component_connectors, - "fmu": fmu_file_relative_to_lib_source, - } - if step_size: - component["stepSize"] = step_size - if component_initial_values: - component["initialize"] = component_initial_values - # Save in components dict - components[component_name] = component - - # System Structure - system_structure: Dict[str, Dict[str, Any]] = { - "connections": connections, - "components": components, - } - - # Global Settings - # 1: Defaults - simulation: Dict[str, 
Any] = { - "name": system_structure_file.stem, - "startTime": 0.0, - "baseStepSize": 0.01, - "algorithm": "fixedStep", - } - # 2: Overwrite defaults with values from source dict, where existing - if "_attributes" in source_dict: - attributes = source_dict["_attributes"] - if "StartTime" in attributes: - simulation["startTime"] = attributes["StartTime"] - if "BaseStepSize" in attributes: - simulation["baseStepSize"] = attributes["BaseStepSize"] - if "Algorithm" in attributes: - simulation["algorithm"] = attributes["Algorithm"] - - # Environment - environment: dict[str, Path] = {} - if enter_lib_source_as_relative_path: - try: - environment["libSource"] = relative_path( - from_path=target_folder, - to_path=lib_source_folder, - ) - except ValueError: - msg = ( - "Resolving relative path from target folder to libSource folder failed.\n" - "Absolute path for libSource is used instead." - ) - logger.warning(msg) - environment["libSource"] = lib_source_folder - else: - environment["libSource"] = lib_source_folder - - # Assemble case dict - case_dict = { - "_environment": environment, - "systemStructure": system_structure, - "run": { - "simulation": simulation, - }, - } - - source_file_name = source_dict.name.replace(".", "_") - target_file = Path.cwd() / f"caseDict_imported_from_{source_file_name}" - - DictWriter.write(case_dict, target_file, mode="w") - - return +import logging +import os +import re +from pathlib import Path +from typing import Any + +from dictIO import DictReader, DictWriter +from dictIO.utils.counter import BorgCounter +from dictIO.utils.path import highest_common_root_folder, relative_path + +from ospx.utils.dict import find_key, find_keys, find_type_identifier_in_keys + +__ALL__ = ["OspSystemStructureImporter"] + +logger = logging.getLogger(__name__) + + +class OspSystemStructureImporter: + """Class providing methods to convert an existing + OspSystemStructure.xml file to an ospx caseDict file. 
+ """ + + @staticmethod + def import_system_structure( + system_structure_file: str | os.PathLike[str], + *, + enter_lib_source_as_relative_path: bool = False, + ) -> None: + """Import an OspSystemStructure.xml file and save it as an ospx caseDict file. + + Parameters + ---------- + system_structure_file : Union[str, os.PathLike[str]] + the OspSystemStructure.xml file to be imported + enter_lib_source_as_relative_path : bool, optional + whether lib_source shall be entered as relative path in the caseDict, by default False + + Raises + ------ + FileNotFoundError + if system_structure_file does not exist + + NotImplementedError + if the OspSystemStructure contains connections of OSP-IS type 'SignalConnection' or 'SignalGroupConnection'. + These connection types are not implemented yet in ospx. + + NotImplementedError + if the OspSystemStructure contains connections with OSP-IS endpoint type 'Signal' or 'SignalGroup'. + These endpoint types are not implemented yet in ospx. + + TypeError + if the OspSystemStructure contains connections of an unknown type. + + TypeError + if the OspSystemStructure contains connections with an unknown endpoint type. + """ + # Make sure source_file argument is of type Path. If not, cast it to Path type. 
+ system_structure_file = ( + system_structure_file if isinstance(system_structure_file, Path) else Path(system_structure_file) + ) + + # Check whether system structure file exists + if not system_structure_file.exists(): + logger.error(f"OspSystemStructureImporter: File {system_structure_file} not found.") + raise FileNotFoundError(system_structure_file) + + if system_structure_file.suffix != ".xml": + logger.error(f"OspSystemStructureImporter: File type {system_structure_file} not implemented yet.") + return + + counter = BorgCounter() + + source_dict = DictReader.read(system_structure_file, comments=False) + source_folder: Path = system_structure_file.resolve().parent.absolute() + lib_source_folder: Path = source_folder # setting to source_folder acts as fallback / default + target_folder: Path = Path.cwd().absolute() + + # Main subdicts contained in systemStructure + connections: dict[str, dict[Any, Any]] = {} + components: dict[str, dict[Any, Any]] = {} + + # Connections + # iterate over the connections first as they contain the variable and component names + temp_connectors = {} + msg: str + if connections_key := find_key(source_dict, "Connections$"): + for _connection_type, connection_properties in source_dict[connections_key].items(): + connection_type: str = re.sub(r"(^\d{1,6}_)", "", _connection_type) + + if connection_type not in { + "VariableConnection", + "VariableGroupConnection", + }: + if connection_type in {"SignalConnection", "SignalGroupConnection"}: + msg = ( + f"Import failed: {system_structure_file.name} contains a connection " + f"with OSP-IS connection type '{connection_type}'\n" + f"The support for connection type '{connection_type}' is not yet implemented in ospx." 
+ ) + logger.error(msg) + raise NotImplementedError(msg) + msg = ( + f"Import failed: {system_structure_file.name} contains a connection with " + f"unknown connection type '{connection_type}'\n" + ) + logger.error(msg) + raise TypeError(msg) + + connection: dict[str, dict[Any, Any]] = {} + connection_name: str = "" + # following loop has range {0,1} + for index, (_endpoint_type, endpoint_properties) in enumerate(connection_properties.items()): + endpoint_type: str = re.sub(r"(^\d{1,6}_)", "", _endpoint_type) + + if endpoint_type not in {"Variable", "VariableGroup"}: + if endpoint_type in {"Signal", "SignalGroup"}: + msg = ( + f"Import failed: {system_structure_file.name} contains a connection " + f"with OSP-IS endpoint type '{endpoint_type}'\n" + f"The support for endpoint type '{endpoint_type}' is not yet implemented in ospx." + ) + logger.error(msg) + raise NotImplementedError(msg) + msg = ( + f"Import failed: {system_structure_file.name} contains a connection " + f"with unknown endpoint type '{endpoint_type}'\n" + ) + logger.error(msg) + raise TypeError(msg) + + component_name: str = endpoint_properties["_attributes"]["simulator"] + referenced_name: str = endpoint_properties["_attributes"]["name"] + # alternator for source <--> target + # (because there are always 2 entries in VariableConnection in always the same sequence) + endpoint_name: str = "source" if index % 2 == 0 else "target" + endpoint: dict[str, str] = {} + _connector_type: str = "output" if endpoint_name == "source" else "input" + _connector: dict[str, str] = {} + _connector_name: str = f"{component_name}_{referenced_name}" + if endpoint_type == "Variable": + endpoint = { + "component": component_name, + "variable": referenced_name, + } + _connector = { + "variable": referenced_name, + "type": _connector_type, + } + elif endpoint_type == "VariableGroup": + endpoint = { + "component": component_name, + "connector": _connector_name, + } + _connector = { + "variableGroup": referenced_name, + "type": 
_connector_type, + } + connection[endpoint_name] = endpoint + if not connection_name: + connection_name = component_name + else: + connection_name += f"_to_{component_name}" + + # Save _connector in temp_connectors dict. + # (The variable and component information stored in these connectors + # is later used to complete component properties) + temp_connectors[f"{counter():06d}_{component_name}"] = {_connector_name: _connector} + # Save in connections dict + if connection_name in connections: + suffix_number: int = 2 + _connection_name: str = f"{connection_name}_{suffix_number:02d}" + while _connection_name in connections: + suffix_number += 1 + _connection_name = f"{connection_name}_{suffix_number:02d}" + connection_name = _connection_name + connections[connection_name] = connection + # Simulators (=Components) + if simulators_key := find_key(source_dict, "Simulators$"): + # Determine the highest common root folder among all FMU's. + # This will be used as libSource folder. + fmu_folders: list[Path] = [] + fmu_name: str + fmu_file: Path + fmu_folder: Path + for simulator_properties in source_dict[simulators_key].values(): + fmu_name = simulator_properties["_attributes"]["source"] + fmu_file = Path(fmu_name) + if fmu_file.is_absolute(): + fmu_folder = fmu_file.resolve().parent.absolute() + else: + fmu_folder = (source_folder / fmu_file).resolve().parent.absolute() + fmu_folders.append(fmu_folder) + if fmu_folders: + lib_source_folder = highest_common_root_folder(fmu_folders) + + for simulator_properties in source_dict[simulators_key].values(): + # Component + component_name = simulator_properties["_attributes"]["name"] + # Connectors + component_connectors: dict[str, dict[Any, Any]] = {} + for temp_connector_key, connector in temp_connectors.items(): + if component_name in temp_connector_key: + component_connectors |= connector + # FMU + fmu_name = simulator_properties["_attributes"]["source"] + fmu_file = Path(fmu_name) + fmu_file = fmu_file.resolve() if 
fmu_file.is_absolute() else (source_folder / fmu_file).resolve() + fmu_file_relative_to_lib_source: Path = relative_path(lib_source_folder, fmu_file) + # Step Size + step_size: float | None = None + if "stepSize" in simulator_properties["_attributes"]: + step_size = float(simulator_properties["_attributes"]["stepSize"]) + # Initial values + component_initial_values: dict[str, dict[Any, Any]] = {} + if initial_values_key := find_key(simulator_properties, "InitialValues$"): + initial_values = simulator_properties[initial_values_key] + if initial_value_keys := find_keys(initial_values, "InitialValue$"): + for initial_value_key in initial_value_keys: + initial_value = initial_values[initial_value_key] + if data_type := find_type_identifier_in_keys(initial_value): + type_key = find_key(initial_value, f"{data_type}$") + if not type_key: + continue + _type: str = re.sub(r"(^\d{1,6}_)", "", type_key) + referenced_name = initial_value["_attributes"]["variable"] + value: float | int | bool | str + if _type == "Boolean": + value = bool(initial_value[type_key]["_attributes"]["value"]) + elif _type == "Integer": + value = int(initial_value[type_key]["_attributes"]["value"]) + elif _type == "Real": + value = float(initial_value[type_key]["_attributes"]["value"]) + else: + value = initial_value[type_key]["_attributes"]["value"] + component_initial_values |= {referenced_name: {"start": value}} + # Assemble component + component: dict[str, dict[Any, Any] | str | float | Path] = { + "connectors": component_connectors, + "fmu": fmu_file_relative_to_lib_source, + } + if step_size: + component["stepSize"] = step_size + if component_initial_values: + component["initialize"] = component_initial_values + # Save in components dict + components[component_name] = component + + # System Structure + system_structure: dict[str, dict[str, Any]] = { + "connections": connections, + "components": components, + } + + # Global Settings + # 1: Defaults + simulation: dict[str, Any] = { + "name": 
system_structure_file.stem, + "startTime": 0.0, + "baseStepSize": 0.01, + "algorithm": "fixedStep", + } + # 2: Overwrite defaults with values from source dict, where existing + if "_attributes" in source_dict: + attributes = source_dict["_attributes"] + if "StartTime" in attributes: + simulation["startTime"] = attributes["StartTime"] + if "BaseStepSize" in attributes: + simulation["baseStepSize"] = attributes["BaseStepSize"] + if "Algorithm" in attributes: + simulation["algorithm"] = attributes["Algorithm"] + + # Environment + environment: dict[str, Path] = {} + if enter_lib_source_as_relative_path: + try: + environment["libSource"] = relative_path( + from_path=target_folder, + to_path=lib_source_folder, + ) + except ValueError: + msg = ( + "Resolving relative path from target folder to libSource folder failed.\n" + "Absolute path for libSource is used instead." + ) + logger.warning(msg) + environment["libSource"] = lib_source_folder + else: + environment["libSource"] = lib_source_folder + + # Assemble case dict + case_dict = { + "_environment": environment, + "systemStructure": system_structure, + "run": { + "simulation": simulation, + }, + } + + source_file_name = source_dict.name.replace(".", "_") + target_file = Path.cwd() / f"caseDict_imported_from_{source_file_name}" + + DictWriter.write(case_dict, target_file, mode="w") + + return diff --git a/src/ospx/ospCaseBuilder.py b/src/ospx/ospCaseBuilder.py index a1d21cb2..7ed5748e 100644 --- a/src/ospx/ospCaseBuilder.py +++ b/src/ospx/ospCaseBuilder.py @@ -1,7 +1,6 @@ import logging import os from pathlib import Path -from typing import Union from dictIO import CppDict, DictReader @@ -15,16 +14,17 @@ class OspCaseBuilder: """Builder for OSP-specific configuration files needed to run an OSP (co-)simulation case.""" - def __init__(self): + def __init__(self) -> None: return @staticmethod def build( - case_dict_file: Union[str, os.PathLike[str]], + case_dict_file: str | os.PathLike[str], + *, inspect: bool = False, 
graph: bool = False, clean: bool = False, - ): + ) -> None: """Build the OSP-specific configuration files needed to run an OSP (co-)simulation case. Builds following files: @@ -39,18 +39,19 @@ def build( case_dict_file : Union[str, os.PathLike[str]] caseDict file. Contains all case-specific information OspCaseBuilder needs to generate the OSP files. inspect : bool, optional - inspect mode. If True, build() reads all properties from the FMUs but does not actually create the OSP case files, by default False + inspect mode. If True, build() reads all properties from the FMUs + but does not actually create the OSP case files, by default False graph : bool, optional if True, creates a dependency graph image using graphviz, by default False clean : bool, optional - if True, cleans up case folder and deletes any formerly created ospx files, e.g. OspSystemStructure.xml .fmu .csv etc. + if True, cleans up case folder and deletes any formerly created ospx files, + e.g. OspSystemStructure.xml .fmu .csv etc. Raises ------ FileNotFoundError if case_dict_file does not exist """ - # Make sure source_file argument is of type Path. If not, cast it to Path type. 
case_dict_file = case_dict_file if isinstance(case_dict_file, Path) else Path(case_dict_file) if not case_dict_file.exists(): @@ -68,21 +69,20 @@ def build( case = OspSimulationCase(case_dict) try: case.setup() - except Exception as e: - logger.exception(e) + except Exception: + logger.exception("Error during setup of OspSimulationCase.") return if inspect: # inspect and return - case._inspect() # pyright: ignore + case._inspect() # noqa: SLF001 # pyright: ignore[reportPrivateUsage] return - # case.write_osp_model_description_xmls() case.write_osp_system_structure_xml() case.write_system_structure_ssd() - if "postProcessing" in case_dict.keys(): - case._write_plot_config_json() # pyright: ignore + if "postProcessing" in case_dict: + case._write_plot_config_json() # noqa: SLF001 # pyright: ignore[reportPrivateUsage] case.write_statistics_dict() @@ -94,7 +94,7 @@ def build( return -def _clean_case_folder(case_folder: Path): +def _clean_case_folder(case_folder: Path) -> None: """Clean up the case folder and deletes any existing ospx files, e.g. 
modelDescription.xml .fmu .csv etc.""" import re from shutil import rmtree @@ -123,11 +123,8 @@ def _clean_case_folder(case_folder: Path): for file in files: if not re.search(except_pattern, str(file)): - # logger.info("%s in list to clean" % file) if file.is_file(): - # logger.info("file %s cleaned" % file) file.unlink(missing_ok=True) else: - # logger.info("dir %s removed" % file) rmtree(file) return diff --git a/src/ospx/ospSimulationCase.py b/src/ospx/ospSimulationCase.py index 3e9aa520..b99112a9 100644 --- a/src/ospx/ospSimulationCase.py +++ b/src/ospx/ospSimulationCase.py @@ -1,643 +1,645 @@ -import logging -import re -from pathlib import Path -from shutil import copy2 -from typing import Any, Dict, List, Union - -from dictIO import CppDict, DictWriter, XmlFormatter -from dictIO.utils.counter import BorgCounter -from dictIO.utils.path import relative_path - -from ospx import Simulation, System -from ospx.utils.dict import find_key - -__ALL__ = ["OspSimulationCase"] - -logger = logging.getLogger(__name__) - - -class OspSimulationCase: - """OSP Simulation Case.""" - - def __init__( - self, - case_dict: CppDict, - ): - self.counter = BorgCounter() - self.case_dict: CppDict = case_dict - self.case_folder: Path = case_dict.source_file.resolve().parent if case_dict.source_file else Path.cwd() - self.system_structure: System - - # Global settings - self.simulation: Simulation # general properties of the simulation case - self._read_simulation() - self.name: str = self.case_dict.name # initialize conservatively (with fallback path) - if self.simulation and self.simulation.name: - self.name = self.simulation.name - - # Library source path - self.lib_source: Path - self._resolve_lib_source_folder() - - def setup(self): - """Set up the OSP simulation case folder. 
- - Raises - ------ - ValueError - If an expected element in caseDict is missing - FileNotFoundError - If an FMU file referenced in caseDict does not exist - """ - logger.info(f"Set up OSP simulation case '{self.name}' in case folder: {self.case_folder}") - - # Check whether all referenced FMUs actually exist - self._check_all_fmus_exist() - - # Resolve all referenced FMUs and ensure they are accessible from the case folder via a relative path. - # This is necessary because OspSystemStructure.xml allows only relative paths - # as 'source' attribute in a element. - # If an FMU is not accessible via a relative path, it will be copied into the case folder. - self._resolve_all_fmus() - - # Read system structure - if "systemStructure" not in self.case_dict: - msg = f"no 'systemStructure' section found in {self.case_dict.name}. Cannot set up OSP simulation case." - logger.exception(msg) - raise ValueError(msg) - self.system_structure = System(self.case_dict["systemStructure"]) - - # Make sure all components have a step size defined - self._check_components_step_size() - - def _write_osp_model_description_xmls(self): - """Write the _OspModelDescription.xml files for all components defined in the system structure.""" - logger.info( - f"Write OspModelDescription.xml files for OSP simulation case '{self.name}' in case folder: {self.case_folder}" - ) - if not self.system_structure or not self.system_structure.components: - return - for component in self.system_structure.components.values(): - component.write_osp_model_description_xml() - return - - def write_osp_system_structure_xml(self): - """Write the OspSystemStructure.xml file.""" - # sourcery skip: class-extract-method, merge-dict-assign - - osp_system_structure_file = self.case_folder / "OspSystemStructure.xml" - self._clean(osp_system_structure_file) - - logger.info( - f"Write OspSystemStructure.xml file for OSP simulation case '{self.name}' in case folder: {self.case_folder}" - ) - - osp_system_structure: Dict[str, 
Any] = {} - osp_system_structure["_xmlOpts"] = { - "_nameSpaces": {"osp": "https://opensimulationplatform.com/xsd/OspModelDescription-1.0.0.xsd"}, - "_rootTag": "OspSystemStructure", - } - - # Global Settings - if self.simulation: - if self.simulation.start_time: - osp_system_structure["StartTime"] = self.simulation.start_time - if self.simulation.base_step_size: - osp_system_structure["BaseStepSize"] = self.simulation.base_step_size - if self.simulation.algorithm: - osp_system_structure["Algorithm"] = self.simulation.algorithm - - # Simulators (=Components) - simulators: Dict[str, Any] = {} - for index, (_, component) in enumerate(self.system_structure.components.items()): - simulator_key = f"{index:06d}_Simulator" - simulator_properties: Dict[str, Dict[str, Union[str, float, Dict[str, Any], Path]]] = { - "_attributes": { - "name": component.name, - "source": relative_path(self.case_folder, component.fmu.file), - } - } - if component.step_size: - write_step_size_to_osp_system_structure: bool = True - if ( - component.fmu.default_experiment - and component.fmu.default_experiment.step_size - and component.step_size == component.fmu.default_experiment.step_size - ): - write_step_size_to_osp_system_structure = False - if write_step_size_to_osp_system_structure: - simulator_properties["_attributes"]["stepSize"] = component.step_size - - if component.variables_with_start_values: - simulator_properties["InitialValues"] = {} - for index, (_, variable) in enumerate(component.variables_with_start_values.items()): - if variable.start is not None and variable.data_type is None: - logger.error( - f"component {component.name}: An initial value is defined for variable {variable.name}, but its data type is not defined.\n" - f"The initial value for variable {variable.name} will not be written into OspSystemStructure.xml.\n" - "OspSystemStructure.xml will be potentially wrong or incomplete." 
- ) - else: - initial_value_key = f"{index:06d}_InitialValue" - initial_value_properties: Dict[str, Any] = {} - initial_value_properties["_attributes"] = {"variable": variable.name} - if variable.data_type: - initial_value_properties[variable.data_type] = {"_attributes": {"value": variable.start}} - - simulator_properties["InitialValues"][initial_value_key] = initial_value_properties - simulators[simulator_key] = simulator_properties - - osp_system_structure["Simulators"] = simulators - - # Connections - connections: Dict[str, Dict[str, Any]] = {} - for connection in self.system_structure.connections.values(): - if not connection.is_valid: - continue - if connection.is_variable_connection: - connection_key = f"{self.counter():06d}_VariableConnection" - # (note: the order 000000, 000001 is essential here!) - connections[connection_key] = { - "000000_Variable": { - "_attributes": { - "simulator": connection.source_endpoint.component.name, - "name": connection.source_endpoint.variable_name, - } - }, - "000001_Variable": { - "_attributes": { - "simulator": connection.target_endpoint.component.name, - "name": connection.target_endpoint.variable_name, - } - }, - } - if connection.is_variable_group_connection: - connection_key = f"{self.counter():06d}_VariableGroupConnection" - # (note: the order 000000, 000001 is essential here!) 
- connections[connection_key] = { - "000000_VariableGroup": { - "_attributes": { - "simulator": connection.source_endpoint.component.name, - "name": connection.source_endpoint.variable_name, - } - }, - "000001_VariableGroup": { - "_attributes": { - "simulator": connection.target_endpoint.component.name, - "name": connection.target_endpoint.variable_name, - } - }, - } - osp_system_structure["Connections"] = connections - - # Write OspSystemStructure.xml - formatter = XmlFormatter() - DictWriter.write(osp_system_structure, osp_system_structure_file, formatter=formatter) - - self._correct_wrong_xml_namespace( - "OspSystemStructure.xml", - "?", - """""", - ) - - return - - def write_system_structure_ssd(self): - """Write the SystemStructure.ssd file.""" - - system_structure_ssd_file = self.case_folder / "SystemStructure.ssd" - self._clean(system_structure_ssd_file) - - # sourcery skip: merge-dict-assign - logger.info( - f"Write SystemStructure.ssd file for OSP simulation case '{self.name}' in case folder: {self.case_folder}" - ) - - system_structure_ssd: Dict[str, Any] = {} - system_structure_ssd["_xmlOpts"] = { - "_nameSpaces": { - "ssd": "file:///C:/Software/OSP/xsd/SystemStructureDescription", - "ssv": "file:///C:/Software/OSP/xsd/SystemStructureParameterValues", - "ssc": "file:///C:/Software/OSP/xsd/SystemStructureCommon", - }, - "_rootTag": "SystemStructureDescription", - } - system_structure_ssd["System"] = { - "_attributes": { - "name": self.name, - "description": self.name, - } - } - - # Global settings - default_experiment = { - "Annotations": { - "Annotation": { - "_attributes": {"type": "com.opensimulationplatform"}, - "Algorithm": { - "FixedStepAlgorithm": { - "_attributes": { - "baseStepSize": str(self.simulation.base_step_size), - "startTime": str(self.simulation.start_time), - "stopTime": str(self.simulation.stop_time), - } - } - }, - } - } - } - system_structure_ssd["DefaultExperiment"] = default_experiment - - # Components - components: Dict[str, Any] 
= {} - for component_name, component in self.system_structure.components.items(): - connectors: Dict[str, Dict[str, Any]] = {} - for connector in component.connectors.values(): - if connector.variable and connector.type: - connector_key = f"{self.counter():06d}_Connector" - # (note: the order 000000, 000001 is essential here!) - connectors[connector_key] = { - "_attributes": { - "name": connector.variable, - "kind": connector.type, - }, - "Real": {}, - } - element_key = f"{self.counter():06d}_Component" - components[element_key] = { - "_attributes": { - "name": component_name, - "source": relative_path(self.case_folder, component.fmu.file), - }, - "Connectors": connectors, - } - system_structure_ssd["System"]["Elements"] = components - - # Connections - connections: Dict[str, Any] = {} - for connection in self.system_structure.connections.values(): - if connection.source_endpoint and connection.target_endpoint: - connection_key = f"{self.counter():06d}_Connection" - connections[connection_key] = { - "_attributes": { - "startElement": connection.source_endpoint.component.name, - "startConnector": connection.source_endpoint.variable_name, - "endElement": connection.target_endpoint.component.name, - "endConnector": connection.target_endpoint.variable_name, - } - } - system_structure_ssd["System"]["Connections"] = connections - - # Write SystemStructure.ssd - formatter = XmlFormatter(omit_prefix=False) - DictWriter.write(system_structure_ssd, system_structure_ssd_file, formatter=formatter) - - return - - def write_statistics_dict(self): - """Write selected properties of the system structure into a statistics dict. - - I.e. for documentation or further statistical analysis. 
- """ - statistics_dict_file = self.case_folder / "statisticsDict" - # self._clean(statistics_dict_file) - - # sourcery skip: merge-dict-assign, simplify-dictionary-update - logger.info(f"Write statistics dict for OSP simulation case '{self.name}' in case folder: {self.case_folder}") - - statistics_dict = {} - - statistics_dict["simulation"] = {"name": self.simulation.name} - - statistics_dict["components"] = { - "count": len(self.system_structure.components.keys()), - "names": list(self.system_structure.components.keys()), - } - - statistics_dict["connections"] = { - "count": len(self.system_structure.connections.keys()), - "names": list(self.system_structure.connections.keys()), - } - - statistics_dict["connectors"] = { - "count": len(self.system_structure.connectors.keys()), - "names": list(self.system_structure.connectors.keys()), - } - - unit_list: List[str] = [] - display_unit_list: List[str] = [] - factors_list: List[float] = [] - offsets_list: List[float] = [] - for unit in self.system_structure.units.values(): - unit_list.append(unit.name) - display_unit_list.append(unit.display_unit.name) - factors_list.append(unit.display_unit.factor) - offsets_list.append(unit.display_unit.offset) - - statistics_dict["units"] = { - "count": len(self.system_structure.units.keys()), - "unitNames": unit_list, - "displayUnitNames": display_unit_list, - "factors": factors_list, - "offsets": offsets_list, - } - - statistics_dict["variables"] = { - "count": len(self.system_structure.variables.keys()), - "names": list(self.system_structure.variables.keys()), - } - - DictWriter.write(statistics_dict, statistics_dict_file, mode="w") - - def write_watch_dict(self): - """Write a case-specific watch dict file. 
- - The watch dict file can be used with watchCosim for - - convergence control - - convergence plotting - - extracting the results - """ - watch_dict_file = self.case_folder / "watchDict" - # self._clean(watch_dict_file) - - logger.info(f"Write watch dict for OSP simulation case '{self.name}' in case folder: {self.case_folder}") - - watch_dict: Dict[str, Any] = { - "datasources": {}, - "delimiter": ",", # 'objects': {}, - "simulation": {"name": self.simulation.name}, - } - - # @TODO: Time, StepCount, conn0, conn1, etc from modelDescription.xml ModelVariables - # should match connectors in caseDict for respective model. Improvement needed. - # FRALUM, 2021-xx-xx - time_column = 0 - # Components - for component_name, component in self.system_structure.components.items(): - no_of_connectors = len(component.connectors.keys()) - - data_columns = [1] + [x + 2 for x in range(no_of_connectors)] # f*** StepCount - watch_dict["datasources"].update({component_name: {"dataColumns": data_columns, "timeColumn": time_column}}) - - DictWriter.write(watch_dict, watch_dict_file, mode="w") - - return - - def _read_simulation(self): - """Read general simulation properties from case dict.""" - logger.info("reading simulation properties") # 0 - - if "run" not in self.case_dict: - return - if "simulation" not in self.case_dict["run"]: - return - simulation = Simulation() - simulation_properties = self.case_dict["run"]["simulation"] - if "name" in simulation_properties: - simulation.name = simulation_properties["name"] - if "startTime" in simulation_properties: - simulation.start_time = simulation_properties["startTime"] - if "stopTime" in simulation_properties: - simulation.stop_time = simulation_properties["stopTime"] - if "baseStepSize" in simulation_properties: - simulation.base_step_size = simulation_properties["baseStepSize"] - if "algorithm" in simulation_properties: - simulation.algorithm = simulation_properties["algorithm"] - self.simulation = simulation - - def 
_resolve_lib_source_folder(self): - """Resolve the library source folder.""" - self.lib_source = Path.cwd() # initialize conservatively (with fallback path) - if "_environment" in self.case_dict: - if "libSource" in self.case_dict["_environment"]: - self.lib_source = Path(self.case_dict["_environment"]["libSource"]) - else: - logger.warning( - f"no 'libSource' element found in {self.case_dict.name}['_environment']. Path to libSource will be set to current working directory." - ) - else: - logger.warning( - f"no '_environment' section found in {self.case_dict.name}. Path to libSource hence is unknown and will be set to current working directory." - ) - self.lib_source = self.lib_source.resolve().absolute() - - def _resolve_fmu_file(self, fmu_name: str) -> Path: - fmu_file: Path = Path(fmu_name) - if fmu_file.is_absolute(): - fmu_file = fmu_file.resolve() - else: - fmu_file = (self.lib_source / fmu_file).resolve() - return fmu_file - - def _check_all_fmus_exist(self): - """Check whether all referenced FMUs actually exist.""" - logger.debug("Check whether all referenced FMUs exist.") - components = self.case_dict["systemStructure"]["components"] - - for component_name, component_properties in components.items(): - if "fmu" not in component_properties: - msg = f"component {component_name}: 'fmu' element missing in case dict." - logger.exception(msg) - raise ValueError(msg) - fmu_file = self._resolve_fmu_file(component_properties["fmu"]) - if not fmu_file.exists(): - msg = f"component {component_name}: referenced FMU file {fmu_file} not found." - logger.exception(msg) - raise FileNotFoundError(fmu_file) - - def _resolve_all_fmus(self): - """Resolve all referenced FMUs and ensures they are accessible from the case folder via a relative path. - - This is necessary because OspSystemStructure.xml allows only relative paths - as 'source' attribute in a element. - If an FMU is not accessible via a relative path, the FMU will be copied into the case folder. 
- Note: If multiple components reference the same FMU, these get copied only once. - """ - - logger.debug("Ensure all referenced FMUs are accessible from the case folder via a relative path.") - components = self.case_dict["systemStructure"]["components"] - for _, component_properties in components.items(): - fmu_file = self._resolve_fmu_file(component_properties["fmu"]) - try: - _ = relative_path(self.case_folder, fmu_file) - except ValueError: - fmu_file = self._copy_fmu_to_case_folder(fmu_file) - component_properties["fmu"] = fmu_file - - def _copy_fmu_to_case_folder(self, fmu_file: Path) -> Path: - """Copy the passed in FMU file into the case folder. - - If also an accompanying _OspModelDescription.xml file exists in the same folder as the FMU file, - then also that OspModelDescription.xml file will be copied into the case folder. - - Parameters - ---------- - fmu_file : Path - FMU file to be copied into the case folder. - - Returns - ------- - Path - FMU file copied into the case folder. - """ - fmu_file_in_case_folder: Path = (self.case_folder / fmu_file.name).resolve().absolute() - if not fmu_file_in_case_folder.exists(): - logger.info(f"Copy FMU {fmu_file} --> {fmu_file_in_case_folder}") - copy2(fmu_file, self.case_folder) - # Check whether also an _OspModelDescription.xml file exists. - # If so, copy also that one. - osp_model_description_file = fmu_file.with_name(f"{fmu_file.stem}_OspModelDescription.xml") - if osp_model_description_file.exists(): - logger.info(f"Copy OspModelDescription {osp_model_description_file} --> {fmu_file_in_case_folder}") - copy2(osp_model_description_file, self.case_folder) - return fmu_file_in_case_folder - - def _check_components_step_size(self): - """Ensure that all components have a step size defined. - - If a components step size is undefined, it will be set to the base step size. 
- """ - if not self.system_structure or not self.system_structure.components: - return - if not self.simulation or not self.simulation.base_step_size: - return - for component in self.system_structure.components.values(): - if not component.step_size: - component.step_size = self.simulation.base_step_size - return - - def _set_components_step_size(self, step_size: float): - """Overwrite the step size of all components with the passed in value.""" - if not self.system_structure or not self.system_structure.components: - return - for component in self.system_structure.components.values(): - component.step_size = step_size - return - - def _inspect(self): - """Inspects all components and all FMUs for the public variable names and units they declare, as documented in their modelDescription.xml's. - - Results get logged to the console. - """ - logger.info(f"Inspect OSP simulation case '{self.name}' in case folder: {self.case_folder}") - - delim = "\t" * 3 - - log_string = ( - f"Components and related FMUs as defined in {self.case_dict.name}\n" f"\tcomponent{delim}fmu{delim}\n\n" - ) - for component_name, component in self.system_structure.components.items(): - log_string += f"\t{component_name}{delim}{component.fmu.file.name}\n" - logger.info(log_string + "\n") - - log_string = f"FMU attributes defined in the fmu's modelDescription.xml\n" f"\tfmu{delim}attributes{delim}" - for fmu_name, fmu in self.system_structure.fmus.items(): - log_string += f"\n\n\t{fmu_name}\n" - fmu_attributes = "\n".join( - f"\t{delim}{k}{delim}{v}" for k, v in fmu.model_description["_xmlOpts"]["_rootAttributes"].items() - ) - log_string += fmu_attributes - if default_experiment_key := find_key(fmu.model_description, "DefaultExperiment$"): - if "_attributes" in fmu.model_description[default_experiment_key]: - fmu_default_experiment = "\n".join( - f"\t{delim}{k}{delim}{v}" - for k, v in fmu.model_description[default_experiment_key]["_attributes"].items() - ) - log_string += 
f"\n{fmu_default_experiment}" - logger.info(log_string + "\n") - - log_string = ( - f"Unit definitions defined in the fmu's modelDescription.xml\n" - f"\tfmu{delim}unit{delim}display unit{delim}factor{delim}offset" - ) - for fmu_name, fmu in self.system_structure.fmus.items(): - log_string += f"\n\n\t{fmu_name}\n" - unit_definitions = "\n".join( - f"\t{delim}{unit_name}{delim}{unit.display_unit.name}\t{delim}{unit.display_unit.factor}{delim}{unit.display_unit.offset}" - for unit_name, unit in fmu.units.items() - ) - log_string += unit_definitions - logger.info(log_string + "\n") - - log_string = ( - f"Variables defined in the fmu's modelDescription.xml\n" f"\tfmu{delim}variable{delim}type{delim}unit" - ) - logger.info(log_string + "\n") - for fmu_name, fmu in self.system_structure.fmus.items(): - log_string = f"\t{fmu_name}\n" - variable_definitions = "\n".join( - f"\n\n\t{delim}{variable_name}{delim}{variable.data_type}{delim}{variable.unit}" - for variable_name, variable in fmu.variables.items() - ) - log_string += variable_definitions - logger.info(log_string + "\n") - - log_string = ( - f"Connectors defined in {self.case_dict.name}\n" - f"\tComponent{delim}Connector{delim}Variable{delim}VariableGroup{delim}Type" - ) - for component_name, component in self.system_structure.components.items(): - if component.connectors: - log_string += f"\n\n\t{component_name}\n" - connector_definitions = "\n".join( - f"\t{delim}{connector_name}{delim}{connector.variable}{delim}{connector.variable_group}{delim}{connector.type}" - for connector_name, connector in component.connectors.items() - ) - log_string += connector_definitions - logger.info(log_string + "\n") - - logger.info("Inspect mode: Finished.") - - def _write_plot_config_json(self): - """Write the PlotConfig.json file, containing postprocessing information.""" - - plot_config_file = self.case_folder / "PlotConfig.json" - self._clean(plot_config_file) - - if "plots" in self.case_dict["postProcessing"].keys(): - 
temp_dict: Dict[str, List[Dict[str, Any]]] = {"plots": []} - for plot in self.case_dict["postproc"]["plots"].values(): - variables: List[Dict[str, Any]] = [] - for ( - component_name, - component, - ) in self.system_structure.components.items(): - variables.extend( - { - "simulator": component_name, - "variable": connector.variable, - } - for connector_name, connector in component.connectors.items() - if connector_name in plot["ySignals"] - ) - - temp_dict["plots"].append( - { - "label": plot["title"], - "plotType": "trend", - "variables": variables, - } - ) - - DictWriter.write(temp_dict, plot_config_file) - - return - - def _correct_wrong_xml_namespace(self, file_name: str, pattern: str, replace: str): - """Substitutes namespace - (may be obsolete in future). - """ - buffer = "" - with open(file_name, "r") as f: - buffer = re.sub(pattern, replace, f.read()) - - with open(file_name, "w") as f: - _ = f.write(buffer) - - return - - def _clean(self, file_to_remove: Union[str, Path]): - """Clean up single file.""" - if isinstance(file_to_remove, str): - file_to_remove = self.case_folder / file_to_remove - file_to_remove.unlink(missing_ok=True) +import logging +import re +from pathlib import Path +from shutil import copy2 +from typing import Any + +from dictIO import CppDict, DictWriter, XmlFormatter +from dictIO.utils.counter import BorgCounter +from dictIO.utils.path import relative_path + +from ospx import Simulation, System +from ospx.utils.dict import find_key + +__ALL__ = ["OspSimulationCase"] + +logger = logging.getLogger(__name__) + + +class OspSimulationCase: + """OSP Simulation Case.""" + + def __init__( + self, + case_dict: CppDict, + ) -> None: + self.counter = BorgCounter() + self.case_dict: CppDict = case_dict + self.case_folder: Path = case_dict.source_file.resolve().parent if case_dict.source_file else Path.cwd() + self.system_structure: System + + # Global settings + self.simulation: Simulation # general properties of the simulation case + 
self._read_simulation() + self.name: str = self.case_dict.name # initialize conservatively (with fallback path) + if self.simulation and self.simulation.name: + self.name = self.simulation.name + + # Library source path + self.lib_source: Path + self._resolve_lib_source_folder() + + def setup(self) -> None: + """Set up the OSP simulation case folder. + + Raises + ------ + ValueError + If an expected element in caseDict is missing + FileNotFoundError + If an FMU file referenced in caseDict does not exist + """ + logger.info(f"Set up OSP simulation case '{self.name}' in case folder: {self.case_folder}") + + # Check whether all referenced FMUs actually exist + self._check_all_fmus_exist() + + # Resolve all referenced FMUs and ensure they are accessible from the case folder via a relative path. + # This is necessary because OspSystemStructure.xml allows only relative paths + # as 'source' attribute in a element. + # If an FMU is not accessible via a relative path, it will be copied into the case folder. + self._resolve_all_fmus() + + # Read system structure + if "systemStructure" not in self.case_dict: + msg = f"no 'systemStructure' section found in {self.case_dict.name}. Cannot set up OSP simulation case." + logger.exception(msg) + raise ValueError(msg) + self.system_structure = System(self.case_dict["systemStructure"]) + + # Make sure all components have a step size defined + self._check_components_step_size() + + def _write_osp_model_description_xmls(self) -> None: + """Write the _OspModelDescription.xml files for all components. + + Writes the _OspModelDescription.xml files for all components defined in the system structure. 
+ """ + logger.info( + f"Write OspModelDescription.xml files for OSP simulation case '{self.name}' " + f"in case folder: {self.case_folder}" + ) + if not self.system_structure or not self.system_structure.components: + return + for component in self.system_structure.components.values(): + component.write_osp_model_description_xml() + return + + def write_osp_system_structure_xml(self) -> None: + """Write the OspSystemStructure.xml file.""" + # sourcery skip: class-extract-method, merge-dict-assign + + osp_system_structure_file = self.case_folder / "OspSystemStructure.xml" + self._clean(osp_system_structure_file) + + logger.info( + f"Write OspSystemStructure.xml file for OSP simulation case '{self.name}' " + f"in case folder: {self.case_folder}" + ) + + osp_system_structure: dict[str, Any] = {} + osp_system_structure["_xmlOpts"] = { + "_nameSpaces": {"osp": "https://opensimulationplatform.com/xsd/OspModelDescription-1.0.0.xsd"}, + "_rootTag": "OspSystemStructure", + } + + # Global Settings + if self.simulation: + if self.simulation.start_time: + osp_system_structure["StartTime"] = self.simulation.start_time + if self.simulation.base_step_size: + osp_system_structure["BaseStepSize"] = self.simulation.base_step_size + if self.simulation.algorithm: + osp_system_structure["Algorithm"] = self.simulation.algorithm + + # Simulators (=Components) + simulators: dict[str, Any] = {} + for i, (_, component) in enumerate(self.system_structure.components.items()): + simulator_key = f"{i:06d}_Simulator" + simulator_properties: dict[str, dict[str, str | float | dict[str, Any] | Path]] = { + "_attributes": { + "name": component.name, + "source": relative_path(self.case_folder, component.fmu.file), + } + } + if component.step_size: + write_step_size_to_osp_system_structure: bool = True + if ( + component.fmu.default_experiment + and component.fmu.default_experiment.step_size + and component.step_size == component.fmu.default_experiment.step_size + ): + 
write_step_size_to_osp_system_structure = False + if write_step_size_to_osp_system_structure: + simulator_properties["_attributes"]["stepSize"] = component.step_size + + if component.variables_with_start_values: + simulator_properties["InitialValues"] = {} + for ii, (_, variable) in enumerate(component.variables_with_start_values.items()): + if variable.start is not None and variable.data_type is None: + logger.error( + f"component {component.name}: An initial value is defined for variable {variable.name}, " + "but its data type is not defined.\n" + f"The initial value for variable {variable.name} will not be written " + "into OspSystemStructure.xml.\n" + "OspSystemStructure.xml will be potentially wrong or incomplete." + ) + else: + initial_value_key = f"{ii:06d}_InitialValue" + initial_value_properties: dict[str, Any] = {} + initial_value_properties["_attributes"] = {"variable": variable.name} + if variable.data_type: + initial_value_properties[variable.data_type] = {"_attributes": {"value": variable.start}} + + simulator_properties["InitialValues"][initial_value_key] = initial_value_properties + simulators[simulator_key] = simulator_properties + + osp_system_structure["Simulators"] = simulators + + # Connections + connections: dict[str, dict[str, Any]] = {} + for connection in self.system_structure.connections.values(): + if not connection.is_valid: + continue + if connection.is_variable_connection: + connection_key = f"{self.counter():06d}_VariableConnection" + # (note: the order 000000, 000001 is essential here!) 
+ connections[connection_key] = { + "000000_Variable": { + "_attributes": { + "simulator": connection.source_endpoint.component.name, + "name": connection.source_endpoint.variable_name, + } + }, + "000001_Variable": { + "_attributes": { + "simulator": connection.target_endpoint.component.name, + "name": connection.target_endpoint.variable_name, + } + }, + } + if connection.is_variable_group_connection: + connection_key = f"{self.counter():06d}_VariableGroupConnection" + # (note: the order 000000, 000001 is essential here!) + connections[connection_key] = { + "000000_VariableGroup": { + "_attributes": { + "simulator": connection.source_endpoint.component.name, + "name": connection.source_endpoint.variable_name, + } + }, + "000001_VariableGroup": { + "_attributes": { + "simulator": connection.target_endpoint.component.name, + "name": connection.target_endpoint.variable_name, + } + }, + } + osp_system_structure["Connections"] = connections + + # Write OspSystemStructure.xml + formatter = XmlFormatter() + DictWriter.write(osp_system_structure, osp_system_structure_file, formatter=formatter) + + self._correct_wrong_xml_namespace( + "OspSystemStructure.xml", + "?", + """""", + ) + + return + + def write_system_structure_ssd(self) -> None: + """Write the SystemStructure.ssd file.""" + system_structure_ssd_file = self.case_folder / "SystemStructure.ssd" + self._clean(system_structure_ssd_file) + + # sourcery skip: merge-dict-assign + logger.info( + f"Write SystemStructure.ssd file for OSP simulation case '{self.name}' in case folder: {self.case_folder}" + ) + + system_structure_ssd: dict[str, Any] = {} + system_structure_ssd["_xmlOpts"] = { + "_nameSpaces": { + "ssd": "file:///C:/Software/OSP/xsd/SystemStructureDescription", + "ssv": "file:///C:/Software/OSP/xsd/SystemStructureParameterValues", + "ssc": "file:///C:/Software/OSP/xsd/SystemStructureCommon", + }, + "_rootTag": "SystemStructureDescription", + } + system_structure_ssd["System"] = { + "_attributes": { + "name": 
self.name, + "description": self.name, + } + } + + # Global settings + default_experiment = { + "Annotations": { + "Annotation": { + "_attributes": {"type": "com.opensimulationplatform"}, + "Algorithm": { + "FixedStepAlgorithm": { + "_attributes": { + "baseStepSize": str(self.simulation.base_step_size), + "startTime": str(self.simulation.start_time), + "stopTime": str(self.simulation.stop_time), + } + } + }, + } + } + } + system_structure_ssd["DefaultExperiment"] = default_experiment + + # Components + components: dict[str, Any] = {} + for component_name, component in self.system_structure.components.items(): + connectors: dict[str, dict[str, Any]] = {} + for connector in component.connectors.values(): + if connector.variable and connector.type: + connector_key = f"{self.counter():06d}_Connector" + # (note: the order 000000, 000001 is essential here!) + connectors[connector_key] = { + "_attributes": { + "name": connector.variable, + "kind": connector.type, + }, + "Real": {}, + } + element_key = f"{self.counter():06d}_Component" + components[element_key] = { + "_attributes": { + "name": component_name, + "source": relative_path(self.case_folder, component.fmu.file), + }, + "Connectors": connectors, + } + system_structure_ssd["System"]["Elements"] = components + + # Connections + connections: dict[str, Any] = {} + for connection in self.system_structure.connections.values(): + if connection.source_endpoint and connection.target_endpoint: + connection_key = f"{self.counter():06d}_Connection" + connections[connection_key] = { + "_attributes": { + "startElement": connection.source_endpoint.component.name, + "startConnector": connection.source_endpoint.variable_name, + "endElement": connection.target_endpoint.component.name, + "endConnector": connection.target_endpoint.variable_name, + } + } + system_structure_ssd["System"]["Connections"] = connections + + # Write SystemStructure.ssd + formatter = XmlFormatter(omit_prefix=False) + DictWriter.write(system_structure_ssd, 
system_structure_ssd_file, formatter=formatter) + + return + + def write_statistics_dict(self) -> None: + """Write selected properties of the system structure into a statistics dict. + + I.e. for documentation or further statistical analysis. + """ + statistics_dict_file = self.case_folder / "statisticsDict" + + # sourcery skip: merge-dict-assign, simplify-dictionary-update + logger.info(f"Write statistics dict for OSP simulation case '{self.name}' in case folder: {self.case_folder}") + + statistics_dict: dict[str, dict[str, Any]] = {} + + statistics_dict["simulation"] = {"name": self.simulation.name} + + statistics_dict["components"] = { + "count": len(self.system_structure.components.keys()), + "names": list(self.system_structure.components.keys()), + } + + statistics_dict["connections"] = { + "count": len(self.system_structure.connections.keys()), + "names": list(self.system_structure.connections.keys()), + } + + statistics_dict["connectors"] = { + "count": len(self.system_structure.connectors.keys()), + "names": list(self.system_structure.connectors.keys()), + } + + unit_list: list[str] = [] + display_unit_list: list[str] = [] + factors_list: list[float] = [] + offsets_list: list[float] = [] + for unit in self.system_structure.units.values(): + unit_list.append(unit.name) + display_unit_list.append(unit.display_unit.name) + factors_list.append(unit.display_unit.factor) + offsets_list.append(unit.display_unit.offset) + + statistics_dict["units"] = { + "count": len(self.system_structure.units.keys()), + "unitNames": unit_list, + "displayUnitNames": display_unit_list, + "factors": factors_list, + "offsets": offsets_list, + } + + statistics_dict["variables"] = { + "count": len(self.system_structure.variables.keys()), + "names": list(self.system_structure.variables.keys()), + } + + DictWriter.write(statistics_dict, statistics_dict_file, mode="w") + + def write_watch_dict(self) -> None: + """Write a case-specific watch dict file. 
+ + The watch dict file can be used with watchCosim for + - convergence control + - convergence plotting + - extracting the results + """ + watch_dict_file = self.case_folder / "watchDict" + + logger.info(f"Write watch dict for OSP simulation case '{self.name}' in case folder: {self.case_folder}") + + watch_dict: dict[str, Any] = { + "datasources": {}, + "delimiter": ",", # 'objects': {}, + "simulation": {"name": self.simulation.name}, + } + + # @TODO: Time, StepCount, conn0, conn1, etc from modelDescription.xml ModelVariables + # should match connectors in caseDict for respective model. Improvement needed. + # FRALUM, 2021-xx-xx + time_column = 0 + # Components + for component_name, component in self.system_structure.components.items(): + no_of_connectors = len(component.connectors.keys()) + + data_columns = [1] + [x + 2 for x in range(no_of_connectors)] # f*** StepCount + watch_dict["datasources"].update({component_name: {"dataColumns": data_columns, "timeColumn": time_column}}) + + DictWriter.write(watch_dict, watch_dict_file, mode="w") + + return + + def _read_simulation(self) -> None: + """Read general simulation properties from case dict.""" + logger.info("reading simulation properties") # 0 + + if "run" not in self.case_dict: + return + if "simulation" not in self.case_dict["run"]: + return + simulation = Simulation() + simulation_properties = self.case_dict["run"]["simulation"] + if "name" in simulation_properties: + simulation.name = simulation_properties["name"] + if "startTime" in simulation_properties: + simulation.start_time = simulation_properties["startTime"] + if "stopTime" in simulation_properties: + simulation.stop_time = simulation_properties["stopTime"] + if "baseStepSize" in simulation_properties: + simulation.base_step_size = simulation_properties["baseStepSize"] + if "algorithm" in simulation_properties: + simulation.algorithm = simulation_properties["algorithm"] + self.simulation = simulation + + def _resolve_lib_source_folder(self) -> None: 
+ """Resolve the library source folder.""" + self.lib_source = Path.cwd() # initialize conservatively (with fallback path) + if "_environment" in self.case_dict: + if "libSource" in self.case_dict["_environment"]: + self.lib_source = Path(self.case_dict["_environment"]["libSource"]) + else: + logger.warning( + f"no 'libSource' element found in {self.case_dict.name}['_environment']. " + "Path to libSource will be set to current working directory." + ) + else: + logger.warning( + f"no '_environment' section found in {self.case_dict.name}. " + "Path to libSource hence is unknown and will be set to current working directory." + ) + self.lib_source = self.lib_source.resolve().absolute() + + def _resolve_fmu_file(self, fmu_name: str) -> Path: + fmu_file: Path = Path(fmu_name) + fmu_file = fmu_file.resolve() if fmu_file.is_absolute() else (self.lib_source / fmu_file).resolve() + return fmu_file + + def _check_all_fmus_exist(self) -> None: + """Check whether all referenced FMUs actually exist.""" + logger.debug("Check whether all referenced FMUs exist.") + components = self.case_dict["systemStructure"]["components"] + + for component_name, component_properties in components.items(): + if "fmu" not in component_properties: + msg = f"component {component_name}: 'fmu' element missing in case dict." + logger.exception(msg) + raise ValueError(msg) + fmu_file = self._resolve_fmu_file(component_properties["fmu"]) + if not fmu_file.exists(): + msg = f"component {component_name}: referenced FMU file {fmu_file} not found." + logger.exception(msg) + raise FileNotFoundError(fmu_file) + + def _resolve_all_fmus(self) -> None: + """Resolve all referenced FMUs and ensures they are accessible from the case folder via a relative path. + + This is necessary because OspSystemStructure.xml allows only relative paths + as 'source' attribute in a element. + If an FMU is not accessible via a relative path, the FMU will be copied into the case folder. 
+ Note: If multiple components reference the same FMU, these get copied only once. + """ + logger.debug("Ensure all referenced FMUs are accessible from the case folder via a relative path.") + components = self.case_dict["systemStructure"]["components"] + for component_properties in components.values(): + fmu_file = self._resolve_fmu_file(component_properties["fmu"]) + try: + _ = relative_path(self.case_folder, fmu_file) + except ValueError: + fmu_file = self._copy_fmu_to_case_folder(fmu_file) + component_properties["fmu"] = fmu_file + + def _copy_fmu_to_case_folder(self, fmu_file: Path) -> Path: + """Copy the passed in FMU file into the case folder. + + If also an accompanying _OspModelDescription.xml file exists in the same folder as the FMU file, + then also that OspModelDescription.xml file will be copied into the case folder. + + Parameters + ---------- + fmu_file : Path + FMU file to be copied into the case folder. + + Returns + ------- + Path + FMU file copied into the case folder. + """ + fmu_file_in_case_folder: Path = (self.case_folder / fmu_file.name).resolve().absolute() + if not fmu_file_in_case_folder.exists(): + logger.info(f"Copy FMU {fmu_file} --> {fmu_file_in_case_folder}") + copy2(fmu_file, self.case_folder) + # Check whether also an _OspModelDescription.xml file exists. + # If so, copy also that one. + osp_model_description_file = fmu_file.with_name(f"{fmu_file.stem}_OspModelDescription.xml") + if osp_model_description_file.exists(): + logger.info(f"Copy OspModelDescription {osp_model_description_file} --> {fmu_file_in_case_folder}") + copy2(osp_model_description_file, self.case_folder) + return fmu_file_in_case_folder + + def _check_components_step_size(self) -> None: + """Ensure that all components have a step size defined. + + If a components step size is undefined, it will be set to the base step size. 
+ """ + if not self.system_structure or not self.system_structure.components: + return + if not self.simulation or not self.simulation.base_step_size: + return + for component in self.system_structure.components.values(): + if not component.step_size: + component.step_size = self.simulation.base_step_size + return + + def _set_components_step_size(self, step_size: float) -> None: + """Overwrite the step size of all components with the passed in value.""" + if not self.system_structure or not self.system_structure.components: + return + for component in self.system_structure.components.values(): + component.step_size = step_size + return + + def _inspect(self) -> None: + """Inspect all components and all FMUs for the public variable names and units they declare. + + Inspects all components and all FMUs for the public variable names and units they declare + in their modelDescription.xml's. Results get logged to the console. + """ + logger.info(f"Inspect OSP simulation case '{self.name}' in case folder: {self.case_folder}") + + delim = "\t" * 3 + + log_string = ( + f"Components and related FMUs as defined in {self.case_dict.name}\n" f"\tcomponent{delim}fmu{delim}\n\n" + ) + for component_name, component in self.system_structure.components.items(): + log_string += f"\t{component_name}{delim}{component.fmu.file.name}\n" + logger.info(log_string + "\n") + + log_string = f"FMU attributes defined in the fmu's modelDescription.xml\n" f"\tfmu{delim}attributes{delim}" + for fmu_name, fmu in self.system_structure.fmus.items(): + log_string += f"\n\n\t{fmu_name}\n" + fmu_attributes = "\n".join( + f"\t{delim}{k}{delim}{v}" for k, v in fmu.model_description["_xmlOpts"]["_rootAttributes"].items() + ) + log_string += fmu_attributes + default_experiment_key = find_key(fmu.model_description, "DefaultExperiment$") + if default_experiment_key and "_attributes" in fmu.model_description[default_experiment_key]: + fmu_default_experiment = "\n".join( + f"\t{delim}{k}{delim}{v}" + for k, v 
in fmu.model_description[default_experiment_key]["_attributes"].items() + ) + log_string += f"\n{fmu_default_experiment}" + logger.info(log_string + "\n") + + log_string = ( + f"Unit definitions defined in the fmu's modelDescription.xml\n" + f"\tfmu{delim}unit{delim}display unit{delim}factor{delim}offset" + ) + for fmu_name, fmu in self.system_structure.fmus.items(): + log_string += f"\n\n\t{fmu_name}\n" + unit_definitions = "\n".join( + f"\t{delim}{unit_name}{delim}{unit.display_unit.name}\t{delim}{unit.display_unit.factor}{delim}{unit.display_unit.offset}" + for unit_name, unit in fmu.units.items() + ) + log_string += unit_definitions + logger.info(log_string + "\n") + + log_string = ( + f"Variables defined in the fmu's modelDescription.xml\n" f"\tfmu{delim}variable{delim}type{delim}unit" + ) + logger.info(log_string + "\n") + for fmu_name, fmu in self.system_structure.fmus.items(): + log_string = f"\t{fmu_name}\n" + variable_definitions = "\n".join( + f"\n\n\t{delim}{variable_name}{delim}{variable.data_type}{delim}{variable.unit}" + for variable_name, variable in fmu.variables.items() + ) + log_string += variable_definitions + logger.info(log_string + "\n") + + log_string = ( + f"Connectors defined in {self.case_dict.name}\n" + f"\tComponent{delim}Connector{delim}Variable{delim}VariableGroup{delim}Type" + ) + for component_name, component in self.system_structure.components.items(): + if component.connectors: + log_string += f"\n\n\t{component_name}\n" + connector_definitions = "\n".join( + f"\t{delim}{connector_name}{delim}{connector.variable}{delim}{connector.variable_group}{delim}{connector.type}" + for connector_name, connector in component.connectors.items() + ) + log_string += connector_definitions + logger.info(log_string + "\n") + + logger.info("Inspect mode: Finished.") + + def _write_plot_config_json(self) -> None: + """Write the PlotConfig.json file, containing postprocessing information.""" + plot_config_file = self.case_folder / "PlotConfig.json" + 
self._clean(plot_config_file) + + if "plots" in self.case_dict["postProcessing"]: + temp_dict: dict[str, list[dict[str, Any]]] = {"plots": []} + for plot in self.case_dict["postproc"]["plots"].values(): + variables: list[dict[str, Any]] = [] + for ( + component_name, + component, + ) in self.system_structure.components.items(): + variables.extend( + { + "simulator": component_name, + "variable": connector.variable, + } + for connector_name, connector in component.connectors.items() + if connector_name in plot["ySignals"] + ) + + temp_dict["plots"].append( + { + "label": plot["title"], + "plotType": "trend", + "variables": variables, + } + ) + + DictWriter.write(temp_dict, plot_config_file) + + return + + def _correct_wrong_xml_namespace(self, file_name: str, pattern: str, replace: str) -> None: + """Substitutes namespace + (may be obsolete in future). + """ + buffer = "" + with Path(file_name).open() as f: + buffer = re.sub(pattern, replace, f.read()) + + with Path(file_name).open(mode="w") as f: + _ = f.write(buffer) + + return + + def _clean(self, file_to_remove: str | Path) -> None: + """Clean up single file.""" + if isinstance(file_to_remove, str): + file_to_remove = self.case_folder / file_to_remove + file_to_remove.unlink(missing_ok=True) diff --git a/src/ospx/simulation.py b/src/ospx/simulation.py index 01007dd6..89f81506 100644 --- a/src/ospx/simulation.py +++ b/src/ospx/simulation.py @@ -1,33 +1,32 @@ -import logging -from dataclasses import dataclass -from typing import Union - -__ALL__ = ["Simulation"] - -logger = logging.getLogger(__name__) - - -@dataclass() -class Simulation: - """Data class holding the attributes of the 'simulation' element inside OspSystemStructure.xml.""" - - name: Union[str, None] = None - start_time: Union[float, None] = None - stop_time: Union[float, None] = None - base_step_size: Union[float, None] = None - _algorithm: Union[str, None] = None - - @property - def algorithm(self) -> Union[str, None]: # noqa: D102 - return 
self._algorithm - - @algorithm.setter - def algorithm(self, value: str): # noqa: D102 - valid_values: list[str] = [ - "fixedStep", - ] - if value not in valid_values: - logger.error(f"variable {self.name}: algorithm value '{value}' is invalid.") - return - self._algorithm = value - return +import logging +from dataclasses import dataclass + +__ALL__ = ["Simulation"] + +logger = logging.getLogger(__name__) + + +@dataclass() +class Simulation: + """Data class holding the attributes of the 'simulation' element inside OspSystemStructure.xml.""" + + name: str | None = None + start_time: float | None = None + stop_time: float | None = None + base_step_size: float | None = None + _algorithm: str | None = None + + @property + def algorithm(self) -> str | None: + return self._algorithm + + @algorithm.setter + def algorithm(self, value: str) -> None: + valid_values: list[str] = [ + "fixedStep", + ] + if value not in valid_values: + logger.error(f"variable {self.name}: algorithm value '{value}' is invalid.") + return + self._algorithm = value + return diff --git a/src/ospx/system.py b/src/ospx/system.py index 3af8becb..f827ec8a 100644 --- a/src/ospx/system.py +++ b/src/ospx/system.py @@ -1,182 +1,184 @@ -import logging -from typing import Any, MutableMapping, Union - -from ospx import Component, Connection, Connector, Endpoint -from ospx.fmi import FMU, ScalarVariable, Unit - -__ALL__ = ["System"] - -logger = logging.getLogger(__name__) - - -class System: - """The system structure describes the topology of the co-simulated system. - - A system structure can contain an arbitrary number of components. - Components can be connected through connections. - Connections relate a source endpoint with a target endpoint. - Both component variables and component connectors can be used as endpoints in a connection. 
- """ - - def __init__(self, properties: MutableMapping[Any, Any]): - self._components: dict[str, Component] = {} - self._connections: dict[str, Connection] = {} - self._read_components(properties) - self._read_connections(properties) - - @property - def fmus(self) -> dict[str, FMU]: - """Returns a dict with all FMUs referenced by components contained in the system. - - Returns - ------- - dict[str, FMU] - dict with all FMUs - """ - return {component.fmu.file.name: component.fmu for component in self.components.values() if component.fmu} - - @property - def components(self) -> dict[str, Component]: - """Returns a dict with all components contained in the system. - - Returns - ------- - dict[str, Component] - dict with all components - """ - return self._components - - @property - def connections(self) -> dict[str, Connection]: - """Returns a dict with all connections defined in the system. - - Returns - ------- - dict[str, Connection] - dict with all connections - """ - return self._connections - - @property - def units(self) -> dict[str, Unit]: - """Returns a combined dict with all units - from all components contained in the system. - - Returns - ------- - dict[str, Unit] - dict with all units from all components - """ - units: dict[str, Unit] = {} - for component in self.components.values(): - if component.units: - units |= component.units - return units - - @property - def connectors(self) -> dict[str, Connector]: - """Returns a combined dict with all connectors - from all components contained in the system. - - Returns - ------- - dict[str, Connector] - dict with all connectors from all components - """ - connectors: dict[str, Connector] = {} - for component in self.components.values(): - if component.connectors: - connectors |= component.connectors - return connectors - - @property - def variables(self) -> dict[str, ScalarVariable]: - """Returns a combined dict with all scalar variables - from all components contained in the system. 
- - Returns - ------- - dict[str, ScalarVariable] - dict with all scalar variables from all components - """ - variables: dict[str, ScalarVariable] = {} - for component in self.components.values(): - if component.variables: - variables |= component.variables - return variables - - def _read_components(self, properties: MutableMapping[Any, Any]): - """Read components from (case dict) properties.""" - logger.info("read components from case dict") - self._components.clear() - if "components" not in properties: - return - for component_name, component_properties in properties["components"].items(): - component = Component(component_name, component_properties) - self._components[component.name] = component - - def _read_connections(self, properties: MutableMapping[Any, Any]): - """Read connections from (case dict) properties.""" - logger.info("read connections from case dict") - self._connections.clear() - if "connections" not in properties: - return - for connection_name, connection_properties in properties["connections"].items(): - source_endpoint: Union[Endpoint, None] = None - target_endpoint: Union[Endpoint, None] = None - if "source" in connection_properties: - source_endpoint = self._read_endpoint(connection_properties["source"]) - if "target" in connection_properties: - target_endpoint = self._read_endpoint(connection_properties["target"]) - if source_endpoint and target_endpoint: - connection = Connection( - name=connection_name, - source_endpoint=source_endpoint, - target_endpoint=target_endpoint, - ) - self._connections[connection.name] = connection - else: - logger.error( - f"connection {connection_name}: connection could not be resolved. Please recheck connection properties in case dict." 
- ) - return - - def _read_endpoint(self, properties: MutableMapping[Any, Any]) -> Union[Endpoint, None]: - if "component" not in properties: - return None - component: Union[Component, None] = None - connector: Union[Connector, None] = None - variable: Union[ScalarVariable, None] = None - - component_name: str = properties["component"] - if component_name in self.components: - component = self.components[component_name] - - if "connector" in properties: - connector_name = properties["connector"] - if component and connector_name in component.connectors: - connector = component.connectors[connector_name] - else: - for _, c in self.components.items(): - if connector_name in c.connectors: - component = c - connector = c.connectors[connector_name] - break - - if "variable" in properties: - variable_name = properties["variable"] - if component and variable_name in component.variables: - variable = component.variables[variable_name] - - if not component: - return None - - if connector or variable: - endpoint: Endpoint = Endpoint( - component=component, - connector=connector, - variable=variable, - ) - return endpoint if endpoint.is_valid else None - - return None +import logging +from collections.abc import MutableMapping +from typing import Any + +from ospx import Component, Connection, Connector, Endpoint +from ospx.fmi import FMU, ScalarVariable, Unit + +__ALL__ = ["System"] + +logger = logging.getLogger(__name__) + + +class System: + """The system structure describes the topology of the co-simulated system. + + A system structure can contain an arbitrary number of components. + Components can be connected through connections. + Connections relate a source endpoint with a target endpoint. + Both component variables and component connectors can be used as endpoints in a connection. 
+ """ + + def __init__(self, properties: MutableMapping[Any, Any]) -> None: + self._components: dict[str, Component] = {} + self._connections: dict[str, Connection] = {} + self._read_components(properties) + self._read_connections(properties) + + @property + def fmus(self) -> dict[str, FMU]: + """Returns a dict with all FMUs referenced by components contained in the system. + + Returns + ------- + dict[str, FMU] + dict with all FMUs + """ + return {component.fmu.file.name: component.fmu for component in self.components.values() if component.fmu} + + @property + def components(self) -> dict[str, Component]: + """Returns a dict with all components contained in the system. + + Returns + ------- + dict[str, Component] + dict with all components + """ + return self._components + + @property + def connections(self) -> dict[str, Connection]: + """Returns a dict with all connections defined in the system. + + Returns + ------- + dict[str, Connection] + dict with all connections + """ + return self._connections + + @property + def units(self) -> dict[str, Unit]: + """Returns a combined dict with all units + from all components contained in the system. + + Returns + ------- + dict[str, Unit] + dict with all units from all components + """ + units: dict[str, Unit] = {} + for component in self.components.values(): + if component.units: + units |= component.units + return units + + @property + def connectors(self) -> dict[str, Connector]: + """Returns a combined dict with all connectors + from all components contained in the system. + + Returns + ------- + dict[str, Connector] + dict with all connectors from all components + """ + connectors: dict[str, Connector] = {} + for component in self.components.values(): + if component.connectors: + connectors |= component.connectors + return connectors + + @property + def variables(self) -> dict[str, ScalarVariable]: + """Returns a combined dict with all scalar variables + from all components contained in the system. 
+ + Returns + ------- + dict[str, ScalarVariable] + dict with all scalar variables from all components + """ + variables: dict[str, ScalarVariable] = {} + for component in self.components.values(): + if component.variables: + variables |= component.variables + return variables + + def _read_components(self, properties: MutableMapping[Any, Any]) -> None: + """Read components from (case dict) properties.""" + logger.info("read components from case dict") + self._components.clear() + if "components" not in properties: + return + for component_name, component_properties in properties["components"].items(): + component = Component(component_name, component_properties) + self._components[component.name] = component + + def _read_connections(self, properties: MutableMapping[Any, Any]) -> None: + """Read connections from (case dict) properties.""" + logger.info("read connections from case dict") + self._connections.clear() + if "connections" not in properties: + return + for connection_name, connection_properties in properties["connections"].items(): + source_endpoint: Endpoint | None = None + target_endpoint: Endpoint | None = None + if "source" in connection_properties: + source_endpoint = self._read_endpoint(connection_properties["source"]) + if "target" in connection_properties: + target_endpoint = self._read_endpoint(connection_properties["target"]) + if source_endpoint and target_endpoint: + connection = Connection( + name=connection_name, + source_endpoint=source_endpoint, + target_endpoint=target_endpoint, + ) + self._connections[connection.name] = connection + else: + logger.error( + f"connection {connection_name}: connection could not be resolved. " + "Please recheck connection properties in case dict." 
+ ) + return + + def _read_endpoint(self, properties: MutableMapping[Any, Any]) -> Endpoint | None: + if "component" not in properties: + return None + component: Component | None = None + connector: Connector | None = None + variable: ScalarVariable | None = None + + component_name: str = properties["component"] + if component_name in self.components: + component = self.components[component_name] + + if "connector" in properties: + connector_name = properties["connector"] + if component and connector_name in component.connectors: + connector = component.connectors[connector_name] + else: + for c in self.components.values(): + if connector_name in c.connectors: + component = c + connector = c.connectors[connector_name] + break + + if "variable" in properties: + variable_name = properties["variable"] + if component and variable_name in component.variables: + variable = component.variables[variable_name] + + if not component: + return None + + if connector or variable: + endpoint: Endpoint = Endpoint( + component=component, + connector=connector, + variable=variable, + ) + return endpoint if endpoint.is_valid else None + + return None diff --git a/src/ospx/utils/dateTime.py b/src/ospx/utils/dateTime.py index 4e147d29..b8dc3bad 100644 --- a/src/ospx/utils/dateTime.py +++ b/src/ospx/utils/dateTime.py @@ -1,10 +1,9 @@ -from datetime import datetime as datetime -from typing import Tuple +from datetime import datetime __all__ = ["calc_time"] -def calc_time(time0: datetime, time1: datetime) -> Tuple[int, int, int, int, int]: +def calc_time(time0: datetime, time1: datetime) -> tuple[int, int, int, int, int]: """Calculate the time delta between time0 and time1. 
Calculates the time delta between time0 and time1 and diff --git a/src/ospx/utils/dict.py b/src/ospx/utils/dict.py index eb25e994..8faa4943 100644 --- a/src/ospx/utils/dict.py +++ b/src/ospx/utils/dict.py @@ -1,56 +1,58 @@ -import re -from collections import OrderedDict -from typing import Any, List, MutableMapping, Set, Union - - -def find_key(dict: MutableMapping[Any, Any], pattern: str) -> Union[str, None]: - """Find the first key in dict that matches the given pattern.""" - try: - return [k for k in dict.keys() if re.search(pattern, k)][0] - except Exception: - return None - - -def find_keys(dict: MutableMapping[Any, Any], pattern: str) -> Union[List[str], None]: - """Find all keys in dict that match the given pattern.""" - try: - return [k for k in dict.keys() if re.search(pattern, k)] - except Exception: - return None - - -def find_type_identifier_in_keys(dict: MutableMapping[Any, Any]) -> Union[str, None]: - """Find the first key name in dict that contains one of the following type identifier strings: - [Integer|Real|Boolean|Enumeration|String|Unknown]. 
- """ - key_list: List[str] = ["Integer", "Real", "Boolean", "Enumeration", "String", "Unkown"] - type_identifier: List[str] = [] - for key in dict: - key_without_index = re.sub(r"^\d{6}_", "", key) - - if key_without_index in key_list: - type_identifier.append(key_without_index) - - return type_identifier[0] if type_identifier else None - - -def shrink_dict(dict: MutableMapping[Any, Any], unique_key: Union[List[str], None] = None) -> MutableMapping[Any, Any]: - """Identify doubled entries in the passed in dict and return a new dict with doubled entries removed.""" - _unique_key: List[str] = unique_key or [] - unique_keys_string: str = "['" + "']['".join(_unique_key) + "']" - # sort an ordered dict for attribute (child) where the dict is to make unique for - eval_string: str = f"sorted(dict.items(), key=lambda x: str(x[1]{unique_keys_string}))" - - # Identify doublettes and collect them for subsequent removal - seen: Set[Any] = set([]) - remove_key: List[Any] = [] - - # value is necessary here as it is used in the eval statements below. Do not delete it. 
- for key, value in OrderedDict(eval(eval_string)).items(): # noqa: B007 - proove_value = eval(f"value{unique_keys_string}") - if proove_value in seen: - remove_key.append(key) - else: - seen.add(eval(f"value{unique_keys_string}")) - - return {key: dict[key] for key in dict.keys() if key not in remove_key} +import re +from ast import literal_eval +from collections import OrderedDict +from collections.abc import MutableMapping +from typing import Any + + +def find_key(dict_in: MutableMapping[Any, Any], pattern: str) -> str | None: + """Find the first key in dict that matches the given pattern.""" + try: + return next(key for key in dict_in if re.search(pattern, key)) + except Exception: # noqa: BLE001 + return None + + +def find_keys(dict_in: MutableMapping[Any, Any], pattern: str) -> list[str] | None: + """Find all keys in dict that match the given pattern.""" + try: + return [k for k in dict_in if re.search(pattern, k)] + except Exception: # noqa: BLE001 + return None + + +def find_type_identifier_in_keys(dict_in: MutableMapping[Any, Any]) -> str | None: + """Find the first key name in dict that contains one of the following type identifier strings: + [Integer|Real|Boolean|Enumeration|String|Unknown]. 
+ """ + key_list: list[str] = ["Integer", "Real", "Boolean", "Enumeration", "String", "Unkown"] + type_identifier: list[str] = [] + for key in dict_in: + key_without_index = re.sub(r"^\d{6}_", "", key) + + if key_without_index in key_list: + type_identifier.append(key_without_index) + + return type_identifier[0] if type_identifier else None + + +def shrink_dict(dict_in: MutableMapping[Any, Any], unique_key: list[str] | None = None) -> dict[Any, Any]: + """Identify doubled entries in the passed in dict and return a new dict with doubled entries removed.""" + _unique_key: list[str] = unique_key or [] + unique_keys_string: str = "['" + "']['".join(_unique_key) + "']" + # sort an ordered dict for attribute (child) where the dict is to make unique for + eval_string: str = f"sorted(dict.items(), key=lambda x: str(x[1]{unique_keys_string}))" + + # Identify doublettes and collect them for subsequent removal + seen: set[Any] = set() + remove_key: list[Any] = [] + + # value is necessary here as it is used in the eval statements below. Do not delete it. 
+ for key, value in OrderedDict(eval(eval_string)).items(): # noqa: B007, PERF102, S307 + proove_value = literal_eval(f"value{unique_keys_string}") + if proove_value in seen: + remove_key.append(key) + else: + seen.add(literal_eval(f"value{unique_keys_string}")) + + return {key: dict_in[key] for key in dict_in if key not in remove_key} diff --git a/src/ospx/utils/logging.py b/src/ospx/utils/logging.py index 62cd35a8..9faaf25b 100644 --- a/src/ospx/utils/logging.py +++ b/src/ospx/utils/logging.py @@ -1,7 +1,8 @@ +"""Functions to configure logging for the application.""" + import logging import sys from pathlib import Path -from typing import Union __all__ = ["configure_logging"] @@ -10,33 +11,39 @@ def configure_logging( log_level_console: str = "WARNING", - log_file: Union[Path, None] = None, + log_file: Path | None = None, log_level_file: str = "WARNING", -): # sourcery skip: extract-duplicate-method - """Configure logging and set levels for log output to console and file. +) -> None: + """Configure logging for the application, allowing for both console and file logging. + + Sets the log levels and formats for the output, ensuring that logs are captured as specified. Parameters ---------- log_level_console : str, optional log level for console output, by default "WARNING" - log_file : Union[Path, None], optional - log file to be used (optional), by default None + log_file : Path | None, optional + log file to be used. If None, file logging is disabled. 
by default None log_level_file : str, optional log level for file output, by default "WARNING" Raises ------ - ValueError + TypeError if an invalid value for log_level_console or log_level_file is passed - """ + Examples + -------- + configure_logging(log_level_console="INFO", log_file=Path("app.log"), log_level_file="DEBUG") + """ + # sourcery skip: extract-duplicate-method, extract-method log_level_console_numeric = getattr(logging, log_level_console.upper(), None) if not isinstance(log_level_console_numeric, int): - raise ValueError(f"Invalid log level to console: {log_level_console_numeric}") + raise TypeError(f"Invalid log level to console: {log_level_console_numeric}") log_level_file_numeric = getattr(logging, log_level_file.upper(), None) if not isinstance(log_level_file_numeric, int): - raise ValueError(f"Invalid log level to file: {log_level_file_numeric}") + raise TypeError(f"Invalid log level to file: {log_level_file_numeric}") root_logger = logging.getLogger() root_logger.setLevel(logging.DEBUG) @@ -51,6 +58,7 @@ def configure_logging( if not log_file.parent.exists(): log_file.parent.mkdir(parents=True, exist_ok=True) file_handler = logging.FileHandler(str(log_file.absolute()), "a") + print(f"Logging to: {log_file.absolute()}") # noqa: T201 file_handler.setLevel(log_level_file_numeric) file_formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s", "%Y-%m-%d %H:%M:%S") file_handler.setFormatter(file_formatter) diff --git a/src/ospx/utils/plotting.py b/src/ospx/utils/plotting.py index 9374f05d..2c96a216 100644 --- a/src/ospx/utils/plotting.py +++ b/src/ospx/utils/plotting.py @@ -2,9 +2,9 @@ import logging import os import re -from datetime import datetime as datetime +from collections.abc import MutableMapping +from datetime import UTC, datetime from pathlib import Path -from typing import Dict, MutableMapping, Union import matplotlib.pyplot as plt from matplotlib.figure import Figure @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) 
-def create_meta_dict(title: str) -> Dict[str, str]: +def create_meta_dict(title: str) -> dict[str, str]: """Create a default of meta dict which can be passed to save_figure(). Parameters @@ -30,7 +30,7 @@ def create_meta_dict(title: str) -> Dict[str, str]: "Author": "VFW", "Description": title, "Copyright": "VFW", - "Creation Time": str(datetime.now()), + "Creation Time": str(datetime.now(tz=UTC)), "Software": "matplotlib", "Disclaimer": "", "Warning": "", @@ -41,12 +41,12 @@ def create_meta_dict(title: str) -> Dict[str, str]: def save_figure( - fig: Figure, # type: ignore + fig: Figure, extension: str, - path: Union[str, os.PathLike[str]], + path: str | os.PathLike[str], title: str, meta_dict: MutableMapping[str, str], -): +) -> None: """Save a figure object as image file. Parameters @@ -62,11 +62,10 @@ def save_figure( meta_dict : MutableMapping[str, str] a dict with additional meta properties. Will be passed as-is to figure.savefig() """ - # Make sure path argument is of type Path. If not, cast it to Path type. 
path = path if isinstance(path, Path) else Path(path) - if not os.path.exists(path): + if not path.exists(): logger.info(f"path {path} does not exist, creating") # 0 path.mkdir(parents=True, exist_ok=True) @@ -88,23 +87,21 @@ def save_figure( title_in_file_name = title_in_file_name.replace(item[0], item[1]) # limit overall length to 128 characters - if len(title_in_file_name) >= 80: - title_in_file_name = "".join(list(title_in_file_name[:59]) + [".", "."] + list(title_in_file_name[-19:])) + if len(title_in_file_name) >= 80: # noqa: PLR2004 + title_in_file_name = "".join([*list(title_in_file_name[:59]), ".", ".", *list(title_in_file_name[-19:])]) - save_file: str + save_file: str = f"{title_in_file_name}.{extension}" if path: - save_file = str(path / f"{title_in_file_name}.{extension}") - else: - save_file = f"{title_in_file_name}.{extension}" + save_file = str(path / save_file) - fig.savefig( # type: ignore + fig.savefig( # pyright: ignore[reportUnknownMemberType] save_file, orientation="landscape", - # papertype = 'a4', + # papertype = 'a4', # noqa: ERA001 format=extension, transparent=False, metadata=meta_dict, ) - plt.close(fig) # type: ignore + plt.close(fig) return diff --git a/src/ospx/utils/zip.py b/src/ospx/utils/zip.py index 4dd60673..4183359a 100644 --- a/src/ospx/utils/zip.py +++ b/src/ospx/utils/zip.py @@ -4,13 +4,12 @@ from pathlib import Path from shutil import copyfile from tempfile import mkstemp -from typing import Tuple, Union from zipfile import ZIP_DEFLATED, ZipFile logger = logging.getLogger(__name__) -def read_file_content_from_zip(zip_file: Path, file_name: str) -> Union[str, None]: +def read_file_content_from_zip(zip_file: Path, file_name: str) -> str | None: """ belongs to zip functions read a single file. 
@@ -27,11 +26,11 @@ def read_file_content_from_zip(zip_file: Path, file_name: str) -> Union[str, Non logger.exception("misc.zip.read_file_content_from_zip failed") finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return file_content -def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> Union[ZipFile, None]: +def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> ZipFile | None: """ belongs to zip functions rename files. @@ -39,15 +38,14 @@ def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> Un file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None try: - with ZipFile(zip_file, "r") as zip_read: - with ZipFile(temp_name, "w") as zip_write: - for item in zip_read.infolist(): - if item.filename != file_name: - data = zip_read.read(item.filename) - else: - data = zip_read.read(item.filename) - item.filename = new_file_name - zip_write.writestr(item, data) + with ZipFile(zip_file, "r") as zip_read, ZipFile(temp_name, "w") as zip_write: + for item in zip_read.infolist(): + if item.filename != file_name: + data = zip_read.read(item.filename) + else: + data = zip_read.read(item.filename) + item.filename = new_file_name + zip_write.writestr(item, data) _ = copyfile(temp_name, zip_file) updated_zip_file = ZipFile(zip_file, mode="a") @@ -56,12 +54,12 @@ def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> Un logger.exception("misc.zip.rename_file_in_zip failed") finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return updated_zip_file -def remove_files_from_zip(zip_file: Path, *file_names: str) -> Union[ZipFile, None]: +def remove_files_from_zip(zip_file: Path, *file_names: str) -> ZipFile | None: """ belongs to zip functions remove files. 
@@ -69,12 +67,11 @@ def remove_files_from_zip(zip_file: Path, *file_names: str) -> Union[ZipFile, No file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None try: - with ZipFile(zip_file, "r") as zip_read: - with ZipFile(temp_name, "w") as zip_write: - for item in zip_read.infolist(): - if item.filename not in file_names: - data = zip_read.read(item.filename) - zip_write.writestr(item, data) + with ZipFile(zip_file, "r") as zip_read, ZipFile(temp_name, "w") as zip_write: + for item in zip_read.infolist(): + if item.filename not in file_names: + data = zip_read.read(item.filename) + zip_write.writestr(item, data) _ = copyfile(temp_name, zip_file) @@ -85,12 +82,12 @@ def remove_files_from_zip(zip_file: Path, *file_names: str) -> Union[ZipFile, No finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return updated_zip_file -def add_file_content_to_zip(zip_file: Path, file_name: str, file_content: str) -> Union[ZipFile, None]: +def add_file_content_to_zip(zip_file: Path, file_name: str, file_content: str) -> ZipFile | None: """ belongs to zip functions add a single file and its ascii content. @@ -107,35 +104,33 @@ def add_file_content_to_zip(zip_file: Path, file_name: str, file_content: str) - logger.exception("misc.zip.add_file_content_to_zip failed") finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return updated_zip_file def substitute_text_in_zip( - zip_file: Path, file_name_pattern: str = "", subst: Tuple[str, str] = ("", "") -) -> Union[ZipFile, None]: + zip_file: Path, file_name_pattern: str = "", subst: tuple[str, str] = ("", "") +) -> ZipFile | None: """ belongs to zip functions substitutes a given string in all files matching the passed file name pattern. 
""" - file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None try: - with ZipFile(zip_file, "r") as zip_read: - with ZipFile(temp_name, "w") as zip_write: - zip_write.comment = zip_read.comment # preserve the comment - for item in zip_read.infolist(): - if not re.search(file_name_pattern, item.filename): - zip_write.writestr(item, zip_read.read(item.filename)) - else: - temp = zip_read.read(item.filename) - source = (re.findall(subst[0], str(temp)))[0] - if not str(source): - logger.warning(f'substitution source is empty:\'{" ".join(source)}\'') - temp = temp.replace(bytes(source, "utf-8"), bytes(subst[1], "utf-8")) - zip_write.writestr(item, temp) + with ZipFile(zip_file, "r") as zip_read, ZipFile(temp_name, "w") as zip_write: + zip_write.comment = zip_read.comment # preserve the comment + for item in zip_read.infolist(): + if not re.search(file_name_pattern, item.filename): + zip_write.writestr(item, zip_read.read(item.filename)) + else: + temp = zip_read.read(item.filename) + source = (re.findall(subst[0], str(temp)))[0] + if not str(source): + logger.warning(f'substitution source is empty:\'{" ".join(source)}\'') + temp = temp.replace(bytes(source, "utf-8"), bytes(subst[1], "utf-8")) + zip_write.writestr(item, temp) updated_zip_file = ZipFile(zip_file, mode="a") @@ -143,29 +138,27 @@ def substitute_text_in_zip( logger.exception("misc.zip.substitute_text_in_zip failed") finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return updated_zip_file -def update_file_content_in_zip(zip_file: Path, file_name: str, file_content: str) -> Union[ZipFile, None]: +def update_file_content_in_zip(zip_file: Path, file_name: str, file_content: str) -> ZipFile | None: """ belongs to zip functions updates the ascii content of a single file. 
""" - file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None try: - with ZipFile(zip_file, "r") as zip_read: - with ZipFile(temp_name, "w") as zip_write: - zip_write.comment = zip_read.comment # preserve the comment - for item in zip_read.infolist(): - if item.filename != file_name: - zip_write.writestr(item, zip_read.read(item.filename)) + with ZipFile(zip_file, "r") as zip_read, ZipFile(temp_name, "w") as zip_write: + zip_write.comment = zip_read.comment # preserve the comment + for item in zip_read.infolist(): + if item.filename != file_name: + zip_write.writestr(item, zip_read.read(item.filename)) with ZipFile(zip_file, mode="a", compression=ZIP_DEFLATED) as zf: - # zf.writestr(contentFile, '\n'+data.decode('utf-8')) + # zf.writestr(contentFile, '\n'+data.decode('utf-8')) # noqa: ERA001 zf.writestr(file_name, file_content) updated_zip_file = ZipFile(zip_file, mode="a") @@ -174,6 +167,6 @@ def update_file_content_in_zip(zip_file: Path, file_name: str, file_content: str logger.exception("misc.zip.update_file_content_in_zip failed") finally: os.close(file_handle) - os.remove(temp_name) + Path(temp_name).unlink(missing_ok=True) return updated_zip_file diff --git a/src/ospx/watch/cli/watchCosim.py b/src/ospx/watch/cli/watchCosim.py index dc35a01c..4b919d5e 100644 --- a/src/ospx/watch/cli/watchCosim.py +++ b/src/ospx/watch/cli/watchCosim.py @@ -1,15 +1,14 @@ #!/usr/bin/env python -# coding: utf-8 import argparse import logging import os import re import shutil +import sys from argparse import ArgumentParser from pathlib import Path from time import sleep -from typing import List, Union from ospx.utils.logging import configure_logging from ospx.watch.watchCosim import CosimWatcher @@ -38,7 +37,10 @@ def _argparser() -> argparse.ArgumentParser: "-c", "--converge", action="store_true", - help="watch convergence progress, finally --dump (reading watchDict and .csv, plotting convergence until no changes happen for 5s to any .csv)", + help=( + "watch 
convergence progress, finally --dump " + "(reading watchDict and .csv, plotting convergence until no changes happen for 5s to any .csv)" + ), default=False, required=False, ) @@ -134,12 +136,11 @@ def _argparser() -> argparse.ArgumentParser: return parser -def main(): +def main() -> None: """Entry point for console script as configured in setup.cfg. Runs the command line interface and parses arguments and options entered on the console. """ - parser = _argparser() args = parser.parse_args() @@ -150,7 +151,7 @@ def main(): log_level_console = "ERROR" if args.quiet else log_level_console log_level_console = "DEBUG" if args.verbose else log_level_console # ..to file - log_file: Union[Path, None] = Path(args.log) if args.log else None + log_file: Path | None = Path(args.log) if args.log else None log_level_file: str = args.log_level configure_logging(log_level_console, log_file, log_level_file) @@ -166,7 +167,7 @@ def main(): if not converge and not plot and not dump: logger.error("give at least one option what to do: --converge, --plot or --dump") parser.print_help() - exit(0) + sys.exit(0) # Dispatch to _main(), which takes care of processing the arguments and invoking the API. _main( @@ -181,8 +182,9 @@ def main(): ) -def _main( +def _main( # noqa: PLR0913 watch_dict_file_name: str, + *, converge: bool = False, plot: bool = False, dump: bool = False, @@ -190,22 +192,21 @@ def _main( latest_values: int = 0, scale_factor: float = 1, timeline_data: bool = False, -): +) -> None: """Entry point for unit tests. Processes the arguments parsed by main() on the console and invokes the API. 
""" - watch_dict_file = Path(watch_dict_file_name) if not watch_dict_file.is_file(): logger.error(f"file {watch_dict_file} not found.") return - csv_files: List[Path] = [] + csv_files: list[Path] = [] wait_counter: int = 0 - while wait_counter < 5: - csv_files = list(Path(".").glob("*.csv")) + while wait_counter < 5: # noqa: PLR2004 + csv_files = list(Path().glob("*.csv")) if csv_files: break if wait_counter == 0: @@ -219,7 +220,7 @@ def _main( logger.error("no csv files found.") return - csv_file_names: List[str] = sorted(file.name for file in csv_files) + csv_file_names: list[str] = sorted(file.name for file in csv_files) # From the csv file names, identify all data sources for which csv files have been written, # and save them as set. @@ -236,7 +237,13 @@ def _main( for data_source_name in data_source_names ] - watcher = CosimWatcher(latest_csv_file_names, skip_values, latest_values, scale_factor, timeline_data) + watcher = CosimWatcher( + csv_file_names=latest_csv_file_names, + skip_values=skip_values, + latest_values=latest_values, + scale_factor=scale_factor, + timeline_data=timeline_data, + ) watcher.read_watch_dict(watch_dict_file_name) Path(watcher.results_dir).mkdir(parents=True, exist_ok=True) diff --git a/src/ospx/watch/watchCosim.py b/src/ospx/watch/watchCosim.py index a871aaa9..74307870 100644 --- a/src/ospx/watch/watchCosim.py +++ b/src/ospx/watch/watchCosim.py @@ -1,433 +1,440 @@ -# pyright: reportUnknownMemberType=false -# pyright: reportArgumentType=false -# pyright: reportCallIssue=false - -import contextlib -import logging -import os -import re -from math import sqrt as sqrt -from pathlib import Path -from typing import Any, Dict, List, MutableMapping, MutableSequence, Union - -import matplotlib.pyplot as plt -import numpy as np -import pandas as pd -from dictIO import DictReader, DictWriter -from matplotlib import colormaps -from matplotlib.axes import Axes -from matplotlib.figure import Figure -from numpy import ndarray -from pandas import 
DataFrame - -from ospx.utils.plotting import create_meta_dict, save_figure - -logger = logging.getLogger(__name__) - - -class CosimWatcher: - """CosimWatcher allows to monitor a running simulation, - plot trends and dump simulation results into a resultDict file. - """ - - def __init__( - self, - csv_file_names: MutableSequence[str], - skip_values: int, - latest_values: int, - scale_factor: float, - timeline_data: bool, - ): - self.watch_dict_file: Union[Path, None] = None - self.watch_dict: MutableMapping[Any, Any] = {} - self.csv_file_names: MutableSequence[str] = csv_file_names - self.title: str = "watch" - self.delimiter: str = "," # default - self.data_sources: Dict[str, Dict[str, Union[List[int], List[str], int, str]]] = {} - self.results_dir: str = "results" - self.number_of_columns: int = 3 - self.number_of_subplots: int = 0 - self.skip_values: int = skip_values - self.latest_values: int = latest_values - self.scale_factor: float = scale_factor - self.timeline_data: bool = timeline_data - self.figure: Figure - self.terminate: bool = False - self.max_row: int = 0 - return - - def read_watch_dict(self, watch_dict_file: Union[str, os.PathLike[str]]): - """Read watchDict file. - - The watchDict file contains the parameters to be plotted. - - Parameters - ---------- - watch_dict_file : Union[str, os.PathLike[str]] - watchDict file. Contains the parameters to be plotted. - - Raises - ------ - FileNotFoundError - if watch_dict_file does not exist - """ - - # Make sure watch_dict_file argument is of type Path. If not, cast it to Path type. 
- watch_dict_file = watch_dict_file if isinstance(watch_dict_file, Path) else Path(watch_dict_file) - if not watch_dict_file.exists(): - logger.error(f"CosimWatcher: File {watch_dict_file} not found.") - raise FileNotFoundError(watch_dict_file) - - logger.info(f"Configure CosimWatcher with {watch_dict_file}..") - - self.watch_dict_file = watch_dict_file - - self.watch_dict = DictReader.read(Path(self.watch_dict_file), comments=False) - - # read datasources, if available. - # normally this part should be written by ospCaseBuilder entirely - self.data_sources = self.watch_dict["datasources"] - - if "delimiter" in self.watch_dict: - self.delimiter = self.watch_dict["delimiter"] - - if "simulation" in self.watch_dict: - self.title = f"{self.watch_dict_file.name}-{self.watch_dict['simulation']['name']}" - - self._define_data_source_properties_for_plotting() - return - - def plot(self, converge: bool = False): - """Plot trends. - - Plotting + convergence checker (future task) - - Parameters - ---------- - converge : bool, optional - if True, convergence is checked, by default False - """ - - self._initialize_plot() - - if converge: - terminate_loops = 0 - max_no_change_loops = 4 - else: - terminate_loops = 10 - max_no_change_loops = 0 - - df_row_size = 0 - - while True: # do as long as not interrupted - df = self._read_csv_files_into_dataframe() - - # cumulate counter for termination if no changes - if df_row_size == len(df): - terminate_loops += 1 - else: - terminate_loops = 0 - - df_row_size = len(df) - df_col_size = ( - len(list(df)) - 1 - ) # reduced by one because 1st col is time column, frl: check multiple datasources - - # axs = [None for x in range(df_col_size)] - axs: MutableSequence[Axes] = [] - plot: Axes - time_key = list(df)[0] - for index in range(df_col_size): - current_key = list(df)[index + 1] # 0 is time column and thus removed, frl: check multiple dataframes - - plot = self.figure.add_subplot(self.max_row, self.number_of_columns, index + 1) - - try: - 
_ = plot.plot( - time_key, - current_key, - linewidth=2, - color=colormaps["gist_rainbow"](index / self.number_of_subplots), - data=df[[time_key, current_key]], - ) - except (TypeError, ValueError): - pass - except Exception as e: - logger.exception(e) - - # subplot.set_title(currentKey, fontsize=10) - _ = plot.grid(color="#66aa88", linestyle="--") - _ = plot.xaxis.set_tick_params(labelsize=8) - _ = plot.yaxis.set_tick_params(labelsize=8) - _ = plot.legend(fontsize=8) - axs.append(plot) - # if isinstance(plot, Axes): - # axs.append(plot) - # else: - # raise TypeError( - # f"CosimWatcher.plot(): plot is of type {type(plot)}. Expected type was matplotlib.axes.Axes ." - # ) - - _ = self.figure.suptitle(self.title) - - if converge: - plt.show(block=False) - plt.pause(3) - - if terminate_loops >= max_no_change_loops: - save_figure( - self.figure, - extension="png", - path=self.results_dir, - title=self.title, - meta_dict=create_meta_dict(self.title), - ) - break - plt.clf() - - # @TODO: Implement keypress for termination - - return - - def dump(self): - """Write dataframe to dump.""" - - df = self._read_csv_files_into_dataframe() - - result_dict = {} - for header in list(df): - values: ndarray[Any, Any] = df[header].dropna().to_numpy() - _first_value: Any = values[0] - _last_value: Any = values[-1] - _mean: Union[float, str] = "None" - _stddev: Union[float, str] = "None" - _min: Union[float, str] = "None" - _max: Union[float, str] = "None" - with contextlib.suppress(TypeError): - _mean = np.mean(values) # type: ignore - with contextlib.suppress(TypeError): - _stddev = np.std(values) # type: ignore - with contextlib.suppress(TypeError): - _min = np.min(values) - with contextlib.suppress(TypeError): - _max = np.max(values) - result_dict[header] = { - "latestValue": _last_value, - "firstValue": _first_value, - "mean": _mean, - "stdev": _stddev, - "min": _min, - "max": _max, - } - if self.timeline_data: - result_dict[header].update({"values": values}) - - # debug - # 
result_dict.update({'_datasources':self.data_sources}) - result_dict_name = "-".join([self.title, "resultDict"]) - - target_file_path = Path.cwd() / self.results_dir / result_dict_name - DictWriter.write(result_dict, target_file_path, mode="w") - - dump_dict_name = "-".join([self.title, "dataFrame.dump"]) - target_file_path = Path.cwd() / self.results_dir / dump_dict_name - df.to_pickle(str(target_file_path.absolute()), compression="gzip") - return - - def _define_data_source_properties_for_plotting(self): - """Details out the properties of all data sources for plotting. - - Details out the properties of all data source, making sure they contain the following fields required for plotting - - file name - - column names (= variable names) - """ - - # pattern = re.compile(r"(^#|\s+\[.*?\]$)") - pattern = re.compile( - r"(^#{0,2}\s*|\s+\[.*?\]$)" - ) # frl 2023-11-07 remove all leading #'s and spaces and all trailing [.*]'s - - for ( - data_source_name, - data_source_properties, - ) in self.data_sources.items(): # loop over all data sources - for csv_file_name in self.csv_file_names: - if re.match(data_source_name, csv_file_name): # find the correct csv file - data_source_properties.update({"csvFile": csv_file_name}) - - # extract the header row from the csv file to determine the variable names - data_header: List[str] = [] - with open(csv_file_name, "r") as f: - data_header = f.readline().strip().split(self.delimiter) - if not data_header: - continue - - time_column: int = 0 # frl 2023-11-07 default first column - if "timeColumn" in data_source_properties and isinstance(data_source_properties["timeColumn"], int): - time_column = data_source_properties["timeColumn"] - - _time_name: str = data_header[time_column] - data_source_properties.update({"timeName": _time_name}) - _display_time_name: str = pattern.sub("", _time_name) - data_source_properties.update({"displayTimeName": _display_time_name}) - - data_columns: List[int] = [] - # read_only_shortlisted_columns: bool = 
False #flr 2023-11-07 greedy approach needs to be updated on demand - - read_only_shortlisted_columns = "dataColumns" in data_source_properties - if read_only_shortlisted_columns and ( - "dataColumns" in data_source_properties - and isinstance(data_source_properties["dataColumns"], List) - ): - data_columns = [int(col) for col in data_source_properties["dataColumns"]] - # else: frl 2023-11-07 simx heritage? - # # if columns were not explicitely specified in watch dict: - # # Read all columns except settings. - # columns.extend( - # index - # for index, col_name in enumerate(data_header) - # if not re.match(r"^(settings)", col_name) - # ) - - _column_names: List[str] = [data_header[column] for column in data_columns] - data_source_properties.update({"colNames": _column_names}) - _display_column_names: List[str] = [pattern.sub("", col_name) for col_name in _column_names] - # _display_column_names = ["Time", "StepCount"] + [ - _display_column_names = [ - data_source_name + "|" + col_name - for col_name in _display_column_names - # if col_name not in ["Time", "StepCount"] frl 2023-11-07 - ] - - data_source_properties.update({"displayColNames": _display_column_names}) - data_source_properties.update({"xColumn": time_column}) - data_source_properties.update({"yColumns": data_columns}) - - return - - def _initialize_plot(self): - """Initialize the plot. 
- - Collects data and sets plot header line - """ - self.figure = plt.figure(figsize=(16 * self.scale_factor, 9 * self.scale_factor), dpi=150) - # self.fig.tight_layout() # constraint_layout() - _ = self.figure.subplots_adjust( - left=0.1, - bottom=0.05, - right=0.95, - top=0.9, - wspace=0.2, - hspace=0.2, - ) - self.terminate = False - - df = ( - self._read_csv_files_into_dataframe() - ) # do it once to find the number of respective columns of all datasources - self.number_of_subplots = ( - len(list(df)) - 1 - ) # one of the columns is the abscissa, frl: check if this works for multiple datasources and merged time columns - - self.number_of_columns = int(sqrt(self.number_of_subplots - 1)) + 1 - self.max_row = int(self.number_of_subplots / self.number_of_columns - 0.1) + 1 - return - - def _read_csv_files_into_dataframe(self) -> DataFrame: - """Read all csv files into one joint Pandas dataframe. - - Read all csv files (=all data sources, one csv file per data source) into one joint Pandas dataframe. - The returned dataframe hence contains the data of all datas ources. - This dataframe can then be used for plotting and to dump a pickle. 
- - Returns - ------- - pandas.core.frame.DataFrame - Pandas dataframe containing the data of all csv files - """ - - df_all_data_sources = pd.DataFrame() # initialize empty df - - for _, data_source_properties in self.data_sources.items(): - # mapping dict for display column names - column_name_to_display_column_name_mapping: Dict[str, str] = dict( - zip( - [data_source_properties["timeName"]] + data_source_properties["colNames"], # type: ignore - [data_source_properties["displayTimeName"]] + data_source_properties["displayColNames"], # type: ignore - ) - ) - - _column_names: List[str] = [] - if "colNames" in data_source_properties and isinstance(data_source_properties["colNames"], List): - _column_names = [str(col_name) for col_name in data_source_properties["colNames"]] - if "timeName" in data_source_properties and isinstance(data_source_properties["timeName"], str): - _column_names = [data_source_properties["timeName"]] + _column_names - - if "csvFile" in data_source_properties and isinstance(data_source_properties["csvFile"], str): - df_single_data_source: DataFrame - df_single_data_source = pd.read_csv( - Path(data_source_properties["csvFile"]), - usecols=_column_names, - ) - - df_single_data_source = df_single_data_source.rename(columns=column_name_to_display_column_name_mapping) - - if df_all_data_sources.empty: - # first df inherit all columns from single df - df_all_data_sources = df_single_data_source - else: - # all subsequent merge row-wise by time column, - # ignoring index - # (after setting individual time steps for each individual component) - - # concatenate column-wise - # df_all_data_sources = pd.concat([df_all_data_sources, df_single_data_source], axis=1) - - # df_all_data_sources = pd.concat([df_all_data_sources, df_single_data_source], ignore_index=True) - df_all_data_sources = pd.concat( - [df_all_data_sources, df_single_data_source] - ) # frl check for duplicated timeName columns for multiple datasources - - # potential solution - # 
interpolating non-matching time data - # otherwise should component-wise dataframes do a better job - # bypass StepCound yielding in big holes, not plotted by mpl. - # df_all_data_sources = pd.merge_asof( - # df_all_data_sources, - # df_single_data_source, - # on = 'Time', - # by = 'StepCount', - # direction = 'nearest', - # #tolerance = pd.Timedelta('1ms') - # ) - - # find latest common start point for skip and latest - # consider skipping negative values due to wrong inputs - start: int = 0 - if df_all_data_sources.shape[0] - self.skip_values < 0: # safety - logger.error(f"there will be no data, consider adjusting --skip: {self.skip_values}") - # cases - if self.skip_values > 0 and self.latest_values > 0: - start = max(self.skip_values, df_all_data_sources.shape[0] - self.latest_values) - elif self.skip_values > 0 and self.latest_values == 0: - start = self.skip_values - elif self.latest_values > 0 and self.skip_values == 0: - start = df_all_data_sources.shape[0] - self.latest_values - else: - start = 0 - - # if skip latest n steps is to be implemented, no changes to start, but an additional command option is required - length: int = df_all_data_sources.shape[0] - - return df_all_data_sources.iloc[start:length, :] - - def _determine_optimum_screen_size(self): - """Determine the optimum screen size.""" - # Opening and closing of window may be deprecated when a better solution is found - mgr = plt.get_current_fig_manager() - if mgr is None: - return - mgr.full_screen_toggle() - self.screenSize = (mgr.canvas.width(), mgr.canvas.height()) # type: ignore - mgr.window.close() # type: ignore - return +# pyright: reportUnknownMemberType=false +# pyright: reportArgumentType=false +# pyright: reportCallIssue=false +# ruff: noqa: ERA001 + +import contextlib +import logging +import os +import re +from collections.abc import MutableMapping, MutableSequence, Sequence +from math import sqrt +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import 
matplotlib.pyplot as plt +import numpy as np +import pandas as pd +from dictIO import DictReader, DictWriter +from matplotlib import colormaps +from numpy import ndarray +from pandas import DataFrame + +from ospx.utils.plotting import create_meta_dict, save_figure + +if TYPE_CHECKING: + from matplotlib.axes import Axes + from matplotlib.figure import Figure + +logger = logging.getLogger(__name__) + + +class CosimWatcher: + """CosimWatcher allows to monitor a running simulation, + plot trends and dump simulation results into a resultDict file. + """ + + def __init__( + self, + csv_file_names: MutableSequence[str], + skip_values: int, + latest_values: int, + scale_factor: float, + *, + timeline_data: bool, + ) -> None: + self.watch_dict_file: Path | None = None + self.watch_dict: MutableMapping[Any, Any] = {} + self.csv_file_names: MutableSequence[str] = csv_file_names + self.title: str = "watch" + self.delimiter: str = "," # default + self.data_sources: dict[str, dict[str, list[int] | list[str] | int | str]] = {} + self.results_dir: str = "results" + self.number_of_columns: int = 3 + self.number_of_subplots: int = 0 + self.skip_values: int = skip_values + self.latest_values: int = latest_values + self.scale_factor: float = scale_factor + self.timeline_data: bool = timeline_data + self.figure: Figure + self.terminate: bool = False + self.max_row: int = 0 + self.screenSize: tuple[float, float] + return + + def read_watch_dict( + self, + watch_dict_file: str | os.PathLike[str], + ) -> None: + """Read watchDict file. + + The watchDict file contains the parameters to be plotted. + + Parameters + ---------- + watch_dict_file : Union[str, os.PathLike[str]] + watchDict file. Contains the parameters to be plotted. + + Raises + ------ + FileNotFoundError + if watch_dict_file does not exist + """ + # Make sure watch_dict_file argument is of type Path. If not, cast it to Path type. 
+ watch_dict_file = watch_dict_file if isinstance(watch_dict_file, Path) else Path(watch_dict_file) + if not watch_dict_file.exists(): + logger.error(f"CosimWatcher: File {watch_dict_file} not found.") + raise FileNotFoundError(watch_dict_file) + + logger.info(f"Configure CosimWatcher with {watch_dict_file}..") + + self.watch_dict_file = watch_dict_file + + self.watch_dict = DictReader.read(Path(self.watch_dict_file), comments=False) + + # read datasources, if available. + # normally this part should be written by ospCaseBuilder entirely + self.data_sources = self.watch_dict["datasources"] + + if "delimiter" in self.watch_dict: + self.delimiter = self.watch_dict["delimiter"] + + if "simulation" in self.watch_dict: + self.title = f"{self.watch_dict_file.name}-{self.watch_dict['simulation']['name']}" + + self._define_data_source_properties_for_plotting() + return + + def plot( + self, + *, + converge: bool = False, + ) -> None: + """Plot trends. + + Plotting + convergence checker (future task) + + Parameters + ---------- + converge : bool, optional + if True, convergence is checked, by default False + """ + self._initialize_plot() + + if converge: + terminate_loops = 0 + max_no_change_loops = 4 + else: + terminate_loops = 10 + max_no_change_loops = 0 + + df_row_size = 0 + + while True: # do as long as not interrupted + data = self._read_csv_files_into_dataframe() + + # cumulate counter for termination if no changes + if df_row_size == len(data): + terminate_loops += 1 + else: + terminate_loops = 0 + + df_row_size = len(data) + df_col_size = len(list(data)) - 1 # reduced by one because 1st col is time column + + # axs = [None for x in range(df_col_size)] + axs: MutableSequence[Axes] = [] + plot: Axes + time_key: Sequence[float] = list(data)[0] # type: ignore[assignment, reportAssignmentType] # noqa: RUF015 + for index in range(df_col_size): + # 0 is time column and thus removed + current_key: Sequence[float] = list(data)[index + 1] # type: ignore[assignment, 
reportAssignmentType] + + plot = self.figure.add_subplot(self.max_row, self.number_of_columns, index + 1) + + try: + _ = plot.plot( + time_key, + current_key, + linewidth=2, + color=colormaps["gist_rainbow"](index / self.number_of_subplots), + data=data[[time_key, current_key]], + ) + except (TypeError, ValueError): + pass + except Exception: + logger.exception("CosimWatcher.plot(): An error occurred while plotting.") + + # subplot.set_title(currentKey, fontsize=10) + plot.grid(color="#66aa88", linestyle="--") + plot.xaxis.set_tick_params(labelsize=8) + plot.yaxis.set_tick_params(labelsize=8) + _ = plot.legend(fontsize=8) + axs.append(plot) + # if isinstance(plot, Axes): + # axs.append(plot) + # else: + # raise TypeError( + # f"CosimWatcher.plot(): plot is of type {type(plot)}. Expected type was matplotlib.axes.Axes ." + # ) + + _ = self.figure.suptitle(self.title) + + if converge: + plt.show(block=False) + plt.pause(3) + + if terminate_loops >= max_no_change_loops: + save_figure( + self.figure, + extension="png", + path=self.results_dir, + title=self.title, + meta_dict=create_meta_dict(self.title), + ) + break + plt.clf() + + # @TODO: Implement keypress for termination + + return + + def dump(self) -> None: + """Write dataframe to dump.""" + data = self._read_csv_files_into_dataframe() + + result_dict = {} + for header in list(data): + values: ndarray[Any, Any] = data[header].dropna().to_numpy() + _first_value: Any = values[0] + _last_value: Any = values[-1] + _mean: float | str = "None" + _stddev: float | str = "None" + _min: float | str = "None" + _max: float | str = "None" + with contextlib.suppress(TypeError): + _mean = float(np.mean(values)) + with contextlib.suppress(TypeError): + _stddev = float(np.std(values)) + with contextlib.suppress(TypeError): + _min = float(np.min(values)) + with contextlib.suppress(TypeError): + _max = float(np.max(values)) + result_dict[header] = { + "latestValue": _last_value, + "firstValue": _first_value, + "mean": _mean, + 
"stdev": _stddev, + "min": _min, + "max": _max, + } + if self.timeline_data: + result_dict[header].update({"values": values}) + + # debug + # result_dict.update({'_datasources':self.data_sources}) + result_dict_name = f"{self.title}-resultDict" + + target_file_path = Path.cwd() / self.results_dir / result_dict_name + DictWriter.write(result_dict, target_file_path, mode="w") + + dump_dict_name = f"{self.title}-dataFrame.dump" + target_file_path = Path.cwd() / self.results_dir / dump_dict_name + data.to_pickle(str(target_file_path.absolute()), compression="gzip") + return + + def _define_data_source_properties_for_plotting(self) -> None: + """Details out the properties of all data sources for plotting. + + Details out the properties of all data source, making sure they contain + the following fields required for plotting: + - file name + - column names (= variable names) + """ + # Remove all leading #'s and spaces and all trailing [.*]'s + pattern = re.compile(r"(^#{0,2}\s*|\s+\[.*?\]$)") + + for ( + data_source_name, + data_source_properties, + ) in self.data_sources.items(): # loop over all data sources + for csv_file_name in self.csv_file_names: + if re.match(data_source_name, csv_file_name): # find the correct csv file + data_source_properties.update({"csvFile": csv_file_name}) + + # extract the header row from the csv file to determine the variable names + data_header: list[str] = [] + with Path(csv_file_name).open() as f: + data_header = f.readline().strip().split(self.delimiter) + if not data_header: + continue + + time_column: int = 0 # frl 2023-11-07 default first column + if "timeColumn" in data_source_properties and isinstance(data_source_properties["timeColumn"], int): + time_column = data_source_properties["timeColumn"] + + _time_name: str = data_header[time_column] + data_source_properties.update({"timeName": _time_name}) + _display_time_name: str = pattern.sub("", _time_name) + data_source_properties.update({"displayTimeName": _display_time_name}) + + 
data_columns: list[int] = [] + read_only_shortlisted_columns: bool + # NOTE: Greedy approach needs to be updated on demand; hence commnted out. @FRALUM, 2023-11-07 + # read_only_shortlisted_columns = False + + read_only_shortlisted_columns = "dataColumns" in data_source_properties + if read_only_shortlisted_columns and ( + "dataColumns" in data_source_properties + and isinstance(data_source_properties["dataColumns"], list) + ): + data_columns = [int(col) for col in data_source_properties["dataColumns"]] + # else: frl 2023-11-07 simx heritage? + # # if columns were not explicitely specified in watch dict: + # # Read all columns except settings. + # columns.extend( + # index + # for index, col_name in enumerate(data_header) + # if not re.match(r"^(settings)", col_name) + # ) + + _column_names: list[str] = [data_header[column] for column in data_columns] + data_source_properties.update({"colNames": _column_names}) + _display_column_names: list[str] = [pattern.sub("", col_name) for col_name in _column_names] + # _display_column_names = ["Time", "StepCount"] + [ + _display_column_names = [ + data_source_name + "|" + col_name + for col_name in _display_column_names + # if col_name not in ["Time", "StepCount"] frl 2023-11-07 + ] + + data_source_properties.update({"displayColNames": _display_column_names}) + data_source_properties.update({"xColumn": time_column}) + data_source_properties.update({"yColumns": data_columns}) + + return + + def _initialize_plot(self) -> None: + """Initialize the plot. 
+ + Collects data and sets plot header line + """ + self.figure = plt.figure(figsize=(16 * self.scale_factor, 9 * self.scale_factor), dpi=150) + # self.fig.tight_layout() # constraint_layout() + self.figure.subplots_adjust( + left=0.1, + bottom=0.05, + right=0.95, + top=0.9, + wspace=0.2, + hspace=0.2, + ) + self.terminate = False + + # do it once to find the number of respective columns of all datasources + data = self._read_csv_files_into_dataframe() + # one of the columns is the abscissa, frl: check if this works for multiple datasources and merged time columns + self.number_of_subplots = len(list(data)) - 1 + + self.number_of_columns = int(sqrt(self.number_of_subplots - 1)) + 1 + self.max_row = int(self.number_of_subplots / self.number_of_columns - 0.1) + 1 + return + + def _read_csv_files_into_dataframe(self) -> DataFrame: + """Read all csv files into one joint Pandas dataframe. + + Read all csv files (=all data sources, one csv file per data source) into one joint Pandas dataframe. + The returned dataframe hence contains the data of all datas ources. + This dataframe can then be used for plotting and to dump a pickle. 
+ + Returns + ------- + pandas.core.frame.DataFrame + Pandas dataframe containing the data of all csv files + """ + df_all_data_sources = pd.DataFrame() # initialize empty df + + for data_source_properties in self.data_sources.values(): + # mapping dict for display column names + column_name_to_display_column_name_mapping: dict[str, str] = dict( + zip( + [data_source_properties["timeName"]] + data_source_properties["colNames"], # type: ignore[arg-type, operator, reportOperatorIssue] + [data_source_properties["displayTimeName"]] + data_source_properties["displayColNames"], # type: ignore[arg-type, operator, reportOperatorIssue] + strict=False, + ) + ) + + _column_names: list[str] = [] + if "colNames" in data_source_properties and isinstance(data_source_properties["colNames"], list): + _column_names = [str(col_name) for col_name in data_source_properties["colNames"]] + if "timeName" in data_source_properties and isinstance(data_source_properties["timeName"], str): + _column_names = [data_source_properties["timeName"], *_column_names] + + if "csvFile" in data_source_properties and isinstance(data_source_properties["csvFile"], str): + df_single_data_source: DataFrame + df_single_data_source = pd.read_csv( + Path(data_source_properties["csvFile"]), + usecols=_column_names, + ) + + df_single_data_source = df_single_data_source.rename(columns=column_name_to_display_column_name_mapping) + + if df_all_data_sources.empty: + # first df inherit all columns from single df + df_all_data_sources = df_single_data_source + else: + # all subsequent merge row-wise by time column, + # ignoring index + # (after setting individual time steps for each individual component) + + # concatenate column-wise + # df_all_data_sources = pd.concat([df_all_data_sources, df_single_data_source], axis=1) + + # df_all_data_sources = pd.concat([df_all_data_sources, df_single_data_source], ignore_index=True) + df_all_data_sources = pd.concat( + [df_all_data_sources, df_single_data_source] + ) # frl check 
for duplicated timeName columns for multiple datasources + + # potential solution + # interpolating non-matching time data + # otherwise should component-wise dataframes do a better job + # bypass StepCound yielding in big holes, not plotted by mpl. + # df_all_data_sources = pd.merge_asof( + # df_all_data_sources, + # df_single_data_source, + # on = 'Time', + # by = 'StepCount', + # direction = 'nearest', + # #tolerance = pd.Timedelta('1ms') + # ) + + # find latest common start point for skip and latest + # consider skipping negative values due to wrong inputs + start: int = 0 + if df_all_data_sources.shape[0] - self.skip_values < 0: # safety + logger.error(f"there will be no data, consider adjusting --skip: {self.skip_values}") + # cases + if self.skip_values > 0 and self.latest_values > 0: + start = max(self.skip_values, df_all_data_sources.shape[0] - self.latest_values) + elif self.skip_values > 0 and self.latest_values == 0: + start = self.skip_values + elif self.latest_values > 0 and self.skip_values == 0: + start = df_all_data_sources.shape[0] - self.latest_values + else: + start = 0 + + # if skip latest n steps is to be implemented, no changes to start, but an additional command option is required + length: int = df_all_data_sources.shape[0] + + return df_all_data_sources.iloc[start:length, :] + + def _determine_optimum_screen_size(self) -> None: + """Determine the optimum screen size.""" + # Opening and closing of window may be deprecated when a better solution is found + mgr = plt.get_current_fig_manager() + if mgr is None: + return + mgr.full_screen_toggle() + self.screenSize = (mgr.canvas.width(), mgr.canvas.height()) # type: ignore[attr-defined, reportAttributeAccessIssue] + mgr.window.close() # type: ignore[attr-defined, reportAttributeAccessIssue] + return diff --git a/tests/conftest.py b/tests/conftest.py index 99494aed..f7ee17b7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,16 +10,24 @@ @pytest.fixture(scope="package", autouse=True) 
def chdir() -> None: + """ + Fixture that changes the current working directory to the 'test_working_directory' folder. + This fixture is automatically used for the entire package. + """ os.chdir(Path(__file__).parent.absolute() / "test_dicts") @pytest.fixture(scope="package", autouse=True) def test_dir() -> Path: + """ + Fixture that returns the absolute path of the directory containing the current file. + This fixture is automatically used for the entire package. + """ return Path(__file__).parent.absolute() -output_dirs = [] -output_files = [ +output_dirs: list[str] = [] +output_files: list[str] = [ "parsed*", "*.xml", "*.fmu", @@ -33,6 +41,10 @@ def test_dir() -> Path: @pytest.fixture(autouse=True) def default_setup_and_teardown(): + """ + Fixture that performs setup and teardown actions before and after each test function. + It removes the output directories and files specified in 'output_dirs' and 'output_files' lists. + """ _remove_output_dirs_and_files() _create_test_fmu() yield @@ -41,6 +53,9 @@ def default_setup_and_teardown(): def _remove_output_dirs_and_files() -> None: + """ + Helper function that removes the output directories and files specified in 'output_dirs' and 'output_files' lists. 
+ """ for folder in output_dirs: rmtree(folder, ignore_errors=True) for pattern in output_files: @@ -50,10 +65,10 @@ def _remove_output_dirs_and_files() -> None: _file.unlink(missing_ok=True) -def _create_test_fmu(): +def _create_test_fmu() -> None: model_description_file: Path = Path("test_fmu_modelDescription.xml") model_description: str = "" - with open(model_description_file, "r") as f: + with Path.open(model_description_file) as f: model_description = f.read() fmu_file: Path = Path("test_fmu.fmu") fmu_file.unlink(missing_ok=True) @@ -64,16 +79,21 @@ def _create_test_fmu(): ) -def _remove_test_fmu(): +def _remove_test_fmu() -> None: Path("test_fmu.fmu").unlink() @pytest.fixture(autouse=True) def setup_logging(caplog: pytest.LogCaptureFixture) -> None: + """ + Fixture that sets up logging for each test function. + It sets the log level to 'INFO' and clears the log capture. + """ caplog.set_level("INFO") caplog.clear() @pytest.fixture(autouse=True) def logger() -> logging.Logger: + """Fixture that returns the logger object.""" return logging.getLogger() diff --git a/tests/test_dicts/test_fmu.fmu b/tests/test_dicts/test_fmu.fmu index 063aa72da9b50624958a5d708d030eb3e7296afe..7c593b389da2abb4f38b0373a9aff8f58c3d966b 100644 GIT binary patch delta 32 lcmX@jd76_iz?+#xgn@y9gTbvfG;$+fAqxwLF}az=0|0uV2k`&^ delta 32 lcmX@jd76_iz?+#xgn@y9gTb__G<+jpAqxwLF}az=0|0w12n7HD diff --git a/tests/test_fmu.py b/tests/test_fmu.py index b7bdb568..9f2a1943 100644 --- a/tests/test_fmu.py +++ b/tests/test_fmu.py @@ -1,56 +1,56 @@ -from pathlib import Path - -import pytest - -from ospx.fmi.fmu import FMU - - -def test_conftest_create_test_fmu(): - pass - - -def test_fmu_instantiation(): - # Prepare - fmu_file: Path = Path("test_fmu.fmu") - # Execute - fmu = FMU(fmu_file) - # Assert - assert isinstance(fmu, FMU) - - -@pytest.fixture() -def test_fmu() -> FMU: - fmu_file: Path = Path("test_fmu.fmu") - return FMU(fmu_file) - - -def test_fmu_variables_number(test_fmu: FMU): - assert 
len(test_fmu.variables) == 34 - - -def test_fmu_units_number(test_fmu: FMU): - assert len(test_fmu.units) == 0 - - -def test_fmu_variables_fmi_data_type(test_fmu: FMU): - assert test_fmu.variables["Variable_1_IN_Real"].data_type == "Real" - assert isinstance(test_fmu.variables["Variable_1_IN_Real"].start, float) - assert test_fmu.variables["Variable_2_IN_Integer"].data_type == "Integer" - assert test_fmu.variables["Variable_3_IN_Bool"].data_type == "Boolean" - assert test_fmu.variables["Variable_4_OUT_Real"].data_type == "Real" - assert test_fmu.variables["Variable_5_OUT_Integer"].data_type == "Integer" - assert test_fmu.variables["Variable_6_OUT_Bool"].data_type == "Boolean" - - -def test_fmu_variables_start_value(test_fmu: FMU): - assert test_fmu.variables["Vector_1_IN[0]"].start == 10.0 - assert test_fmu.variables["Vector_1_IN[1]"].start == 11.0 - assert test_fmu.variables["Vector_1_IN[2]"].start == 12.0 - - -# def test_fmu(): -# Prepare - -# Execute - -# Assert +from pathlib import Path + +import pytest + +from ospx.fmi.fmu import FMU + + +def test_conftest_create_test_fmu() -> None: + pass + + +def test_fmu_instantiation() -> None: + # Prepare + fmu_file: Path = Path("test_fmu.fmu") + # Execute + fmu = FMU(fmu_file) + # Assert + assert isinstance(fmu, FMU) + + +@pytest.fixture +def test_fmu() -> FMU: + fmu_file: Path = Path("test_fmu.fmu") + return FMU(fmu_file) + + +def test_fmu_variables_number(test_fmu: FMU) -> None: + assert len(test_fmu.variables) == 34 + + +def test_fmu_units_number(test_fmu: FMU) -> None: + assert len(test_fmu.units) == 0 + + +def test_fmu_variables_fmi_data_type(test_fmu: FMU) -> None: + assert test_fmu.variables["Variable_1_IN_Real"].data_type == "Real" + assert isinstance(test_fmu.variables["Variable_1_IN_Real"].start, float) + assert test_fmu.variables["Variable_2_IN_Integer"].data_type == "Integer" + assert test_fmu.variables["Variable_3_IN_Bool"].data_type == "Boolean" + assert test_fmu.variables["Variable_4_OUT_Real"].data_type 
== "Real" + assert test_fmu.variables["Variable_5_OUT_Integer"].data_type == "Integer" + assert test_fmu.variables["Variable_6_OUT_Bool"].data_type == "Boolean" + + +def test_fmu_variables_start_value(test_fmu: FMU) -> None: + assert test_fmu.variables["Vector_1_IN[0]"].start == 10.0 + assert test_fmu.variables["Vector_1_IN[1]"].start == 11.0 + assert test_fmu.variables["Vector_1_IN[2]"].start == 12.0 + + +# def test_fmu() -> None: +# Prepare + +# Execute + +# Assert diff --git a/tests/test_ospCaseBuilder.py b/tests/test_ospCaseBuilder.py index ced5cea7..4582d031 100644 --- a/tests/test_ospCaseBuilder.py +++ b/tests/test_ospCaseBuilder.py @@ -1,53 +1,53 @@ -from pathlib import Path - -from dictIO import DictParser - -from ospx import OspCaseBuilder - - -def test_build(): - # sourcery skip: extract-duplicate-method - # Prepare - case_dict_file = Path("test_caseDict_simple") - parsed_case_dict_file = Path(f"parsed.{case_dict_file.name}") - _ = DictParser.parse(case_dict_file) - # Execute - OspCaseBuilder.build(case_dict_file=parsed_case_dict_file) - # Assert - # fmu files - assert not Path("constantVal.fmu").exists() - assert not Path("difference.fmu").exists() - assert not Path("quotient.fmu").exists() - assert not Path("dividend.fmu").exists() - assert not Path("subtrahend.fmu").exists() - assert not Path("minuend.fmu").exists() - # ModelDescription files-> should NOT have been written - assert not Path("constantVal_ModelDescription.xml").exists() - assert not Path("difference_ModelDescription.xml").exists() - assert not Path("quotient_ModelDescription.xml").exists() - assert not Path("dividend_ModelDescription.xml").exists() - assert not Path("subtrahend_ModelDescription.xml").exists() - assert not Path("minuend_ModelDescription.xml").exists() - # OspModelDescription files - assert not Path("constantVal_OspModelDescription.xml").exists() - assert not Path("difference_OspModelDescription.xml").exists() - assert not Path("quotient_OspModelDescription.xml").exists() 
- assert not Path("dividend_OspModelDescription.xml").exists() - assert not Path("subtrahend_OspModelDescription.xml").exists() - assert not Path("minuend_OspModelDescription.xml").exists() - # SystemStructure files - assert Path("OspSystemStructure.xml").exists() - assert Path("SystemStructure.ssd").exists() - # statisticsDict and watchDict - assert Path("statisticsDict").exists() - assert Path("watchDict").exists() - - -def test_inspect(): - # Prepare - case_dict_file = Path("test_caseDict") - parsed_case_dict_file = Path(f"parsed.{case_dict_file.name}") - _ = DictParser.parse(case_dict_file) - # Execute - OspCaseBuilder.build(case_dict_file=parsed_case_dict_file, inspect=True) - # Assert +from pathlib import Path + +from dictIO import DictParser + +from ospx import OspCaseBuilder + + +def test_build() -> None: + # sourcery skip: extract-duplicate-method + # Prepare + case_dict_file = Path("test_caseDict_simple") + parsed_case_dict_file = Path(f"parsed.{case_dict_file.name}") + _ = DictParser.parse(case_dict_file) + # Execute + OspCaseBuilder.build(case_dict_file=parsed_case_dict_file) + # Assert + # fmu files + assert not Path("constantVal.fmu").exists() + assert not Path("difference.fmu").exists() + assert not Path("quotient.fmu").exists() + assert not Path("dividend.fmu").exists() + assert not Path("subtrahend.fmu").exists() + assert not Path("minuend.fmu").exists() + # ModelDescription files-> should NOT have been written + assert not Path("constantVal_ModelDescription.xml").exists() + assert not Path("difference_ModelDescription.xml").exists() + assert not Path("quotient_ModelDescription.xml").exists() + assert not Path("dividend_ModelDescription.xml").exists() + assert not Path("subtrahend_ModelDescription.xml").exists() + assert not Path("minuend_ModelDescription.xml").exists() + # OspModelDescription files + assert not Path("constantVal_OspModelDescription.xml").exists() + assert not Path("difference_OspModelDescription.xml").exists() + assert not 
Path("quotient_OspModelDescription.xml").exists() + assert not Path("dividend_OspModelDescription.xml").exists() + assert not Path("subtrahend_OspModelDescription.xml").exists() + assert not Path("minuend_OspModelDescription.xml").exists() + # SystemStructure files + assert Path("OspSystemStructure.xml").exists() + assert Path("SystemStructure.ssd").exists() + # statisticsDict and watchDict + assert Path("statisticsDict").exists() + assert Path("watchDict").exists() + + +def test_inspect() -> None: + # Prepare + case_dict_file = Path("test_caseDict") + parsed_case_dict_file = Path(f"parsed.{case_dict_file.name}") + _ = DictParser.parse(case_dict_file) + # Execute + OspCaseBuilder.build(case_dict_file=parsed_case_dict_file, inspect=True) + # Assert diff --git a/tests/test_watchCosim.py b/tests/test_watchCosim.py index f0630d88..690aa646 100644 --- a/tests/test_watchCosim.py +++ b/tests/test_watchCosim.py @@ -1,48 +1,47 @@ -from pathlib import Path -from typing import List - -import pytest - -from ospx.watch.watchCosim import CosimWatcher - - -def test_file_not_found_exception(): - # Prepare - source_file: Path = Path("this_file_does_not_exist") - csv_file_names: List[str] = [] - skip_values: int = 0 - latest_values: int = 0 - watcher = CosimWatcher( - csv_file_names, - skip_values, - latest_values, - scale_factor=1.0, - timeline_data=False, - ) - # Execute and Assert - with pytest.raises(FileNotFoundError): - watcher.read_watch_dict(source_file) - - -def test_read_watch_dict(): - # Prepare - source_file = Path("test_watchDict") - csv_file_names = [ - "test_result_file_difference.csv", - "test_result_file_divident.csv", - "test_result_file_minuend.csv", - "test_result_file_quotient.csv", - "test_result_file_subtrahend.csv", - ] - skip_values = 0 - latest_values = 0 - watcher = CosimWatcher( - csv_file_names, - skip_values, - latest_values, - scale_factor=1.0, - timeline_data=False, - ) - # Execute - watcher.read_watch_dict(source_file) - # Assert +from pathlib import 
Path + +import pytest + +from ospx.watch.watchCosim import CosimWatcher + + +def test_file_not_found_exception() -> None: + # Prepare + source_file: Path = Path("this_file_does_not_exist") + csv_file_names: list[str] = [] + skip_values: int = 0 + latest_values: int = 0 + watcher = CosimWatcher( + csv_file_names, + skip_values, + latest_values, + scale_factor=1.0, + timeline_data=False, + ) + # Execute and Assert + with pytest.raises(FileNotFoundError): + watcher.read_watch_dict(source_file) + + +def test_read_watch_dict() -> None: + # Prepare + source_file = Path("test_watchDict") + csv_file_names = [ + "test_result_file_difference.csv", + "test_result_file_divident.csv", + "test_result_file_minuend.csv", + "test_result_file_quotient.csv", + "test_result_file_subtrahend.csv", + ] + skip_values = 0 + latest_values = 0 + watcher = CosimWatcher( + csv_file_names, + skip_values, + latest_values, + scale_factor=1.0, + timeline_data=False, + ) + # Execute + watcher.read_watch_dict(source_file) + # Assert diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 52296a52..00000000 --- a/tox.ini +++ /dev/null @@ -1,27 +0,0 @@ -[tox] -isolated_build = True -envlist = py{39,310,311,312}-{linux,macos,windows} -# envlist = py{39,310,311,312}-{windows} -# envlist = py{39,310,311,312} - -[coverage:paths] -source = - src/ospx - */site-packages/ospx - -[coverage:run] -source = ospx -branch = True - -[coverage:report] -fail_under = 35.0 -show_missing = True -skip_covered = True - -[testenv] -system_site_packages = True -deps = - pytest>=8.3 - pytest-cov>=5.0 -commands = - pytest --cov --cov-config tox.ini {posargs} From 8425e42172a2a432f37cf4c2ee16fd214e640fc3 Mon Sep 17 00:00:00 2001 From: Claas Date: Tue, 22 Oct 2024 20:56:28 +0200 Subject: [PATCH 14/32] src/ospx/utils/dict.py : changed back from `literal_eval()` to `eval()` --- src/ospx/utils/dict.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/ospx/utils/dict.py b/src/ospx/utils/dict.py index 
8faa4943..e669fbd1 100644 --- a/src/ospx/utils/dict.py +++ b/src/ospx/utils/dict.py @@ -1,5 +1,4 @@ import re -from ast import literal_eval from collections import OrderedDict from collections.abc import MutableMapping from typing import Any @@ -41,7 +40,7 @@ def shrink_dict(dict_in: MutableMapping[Any, Any], unique_key: list[str] | None _unique_key: list[str] = unique_key or [] unique_keys_string: str = "['" + "']['".join(_unique_key) + "']" # sort an ordered dict for attribute (child) where the dict is to make unique for - eval_string: str = f"sorted(dict.items(), key=lambda x: str(x[1]{unique_keys_string}))" + eval_string: str = f"sorted(dict_in.items(), key=lambda x: str(x[1]{unique_keys_string}))" # Identify doublettes and collect them for subsequent removal seen: set[Any] = set() @@ -49,10 +48,10 @@ def shrink_dict(dict_in: MutableMapping[Any, Any], unique_key: list[str] | None # value is necessary here as it is used in the eval statements below. Do not delete it. for key, value in OrderedDict(eval(eval_string)).items(): # noqa: B007, PERF102, S307 - proove_value = literal_eval(f"value{unique_keys_string}") + proove_value = eval(f"value{unique_keys_string}") # noqa: S307 if proove_value in seen: remove_key.append(key) else: - seen.add(literal_eval(f"value{unique_keys_string}")) + seen.add(eval(f"value{unique_keys_string}")) # noqa: S307 return {key: dict_in[key] for key in dict_in if key not in remove_key} From 2519e66ccb746cfd4efd4abcff3e31ee1e4fca6e Mon Sep 17 00:00:00 2001 From: Claas Date: Tue, 22 Oct 2024 21:15:08 +0200 Subject: [PATCH 15/32] ruff.toml : reactivated several docstring rules --- ruff.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 79708cc6..ab5d823e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -21,7 +21,10 @@ select = [ ] ignore = [ # Ruff lint rules temporarily ignored, but which should be reactivated and resolved in the future. 
- "D", # Missing docstrings <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 + "D100", # Missing docstring in public module <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 + "D104", # Missing docstring in public package <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 + "D105", # Missing docstring in magic method <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 + "D107", # Missing docstring in __init__ <- @TODO: reactivate and resolve docstring issues @CLAROS, 2024-10-21 "N999", # Invalid module name <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 "C901", # Function is too complex <- @TODO: reactivate and resolve print statements @CLAROS, 2024-10-21 "PLR0911", # Too many return statements <- @TODO: reactivate and resolve @CLAROS, 2024-10-21 From ebb50c341029932b080fd63097846f61dde2cfc2 Mon Sep 17 00:00:00 2001 From: Claas Date: Tue, 22 Oct 2024 21:15:54 +0200 Subject: [PATCH 16/32] Corrected docstring formatting issues --- src/ospx/component.py | 4 +++- src/ospx/connector.py | 4 +++- src/ospx/fmi/variable.py | 1 + src/ospx/graph.py | 4 +--- src/ospx/importer.py | 4 +--- src/ospx/ospSimulationCase.py | 5 +++-- src/ospx/simulation.py | 2 ++ src/ospx/system.py | 15 ++++++--------- src/ospx/utils/dict.py | 4 +++- src/ospx/utils/zip.py | 36 +++++++++++++++++------------------ src/ospx/watch/watchCosim.py | 4 +++- 11 files changed, 44 insertions(+), 39 deletions(-) diff --git a/src/ospx/component.py b/src/ospx/component.py index 669a47cc..52ce9745 100644 --- a/src/ospx/component.py +++ b/src/ospx/component.py @@ -153,7 +153,9 @@ def _init_variables(self) -> None: @property def variables_with_start_values(self) -> dict[str, ScalarVariable]: - """Returns a dict with all scalar variables for which start values (initial values) + """Return all scalar variables with start values. 
+ + Returns a dict with all scalar variables for which start values (initial values) are defined in the component. Returns diff --git a/src/ospx/connector.py b/src/ospx/connector.py index 6deaa5a1..3ac9aa03 100644 --- a/src/ospx/connector.py +++ b/src/ospx/connector.py @@ -6,7 +6,9 @@ class Connector: - """Connectors allow to explicitely make public a components scalar variable or variable group + """Class representing a connector. + + Connectors allow to explicitely make public a components scalar variable or variable group at the component's outer interface. An connector is for a component what an endpoint is for a connection. diff --git a/src/ospx/fmi/variable.py b/src/ospx/fmi/variable.py index 01e300b4..3803b465 100644 --- a/src/ospx/fmi/variable.py +++ b/src/ospx/fmi/variable.py @@ -233,6 +233,7 @@ def _cast_to_fmi_data_type( Union[int, float, bool, str, List[Any], None] The casted value (in a Python data type that matches the requested fmi data type) """ + # sourcery skip: assign-if-exp, extract-method, reintroduce-else if fmi_data_type in {"Integer", "Real", "Boolean"}: if isinstance(arg, Sequence): logger.warning( diff --git a/src/ospx/graph.py b/src/ospx/graph.py index 4bb3942a..e0eb0e52 100644 --- a/src/ospx/graph.py +++ b/src/ospx/graph.py @@ -18,9 +18,7 @@ class Graph: - """Class providing methods to generate a visual dependency graph - of a system's components and its connections. - """ + """Class providing methods to generate a visual dependency graph of a system's components and its connections.""" @staticmethod def generate_dependency_graph(case: OspSimulationCase) -> None: diff --git a/src/ospx/importer.py b/src/ospx/importer.py index 50e5f020..50238b28 100644 --- a/src/ospx/importer.py +++ b/src/ospx/importer.py @@ -16,9 +16,7 @@ class OspSystemStructureImporter: - """Class providing methods to convert an existing - OspSystemStructure.xml file to an ospx caseDict file. 
- """ + """Class providing methods to convert an existing OspSystemStructure.xml file to an ospx caseDict file.""" @staticmethod def import_system_structure( diff --git a/src/ospx/ospSimulationCase.py b/src/ospx/ospSimulationCase.py index b99112a9..4d93cc13 100644 --- a/src/ospx/ospSimulationCase.py +++ b/src/ospx/ospSimulationCase.py @@ -626,8 +626,9 @@ def _write_plot_config_json(self) -> None: return def _correct_wrong_xml_namespace(self, file_name: str, pattern: str, replace: str) -> None: - """Substitutes namespace - (may be obsolete in future). + """Substitute namespace. + + (may be obsolete in future) """ buffer = "" with Path(file_name).open() as f: diff --git a/src/ospx/simulation.py b/src/ospx/simulation.py index 89f81506..bdd49fff 100644 --- a/src/ospx/simulation.py +++ b/src/ospx/simulation.py @@ -18,10 +18,12 @@ class Simulation: @property def algorithm(self) -> str | None: + """Return the simulation algorithm.""" return self._algorithm @algorithm.setter def algorithm(self, value: str) -> None: + """Set the simulation algorithm.""" valid_values: list[str] = [ "fixedStep", ] diff --git a/src/ospx/system.py b/src/ospx/system.py index f827ec8a..33867ccc 100644 --- a/src/ospx/system.py +++ b/src/ospx/system.py @@ -27,7 +27,7 @@ def __init__(self, properties: MutableMapping[Any, Any]) -> None: @property def fmus(self) -> dict[str, FMU]: - """Returns a dict with all FMUs referenced by components contained in the system. + """Return a dict with all FMUs referenced by components contained in the system. Returns ------- @@ -38,7 +38,7 @@ def fmus(self) -> dict[str, FMU]: @property def components(self) -> dict[str, Component]: - """Returns a dict with all components contained in the system. + """Return a dict with all components contained in the system. Returns ------- @@ -49,7 +49,7 @@ def components(self) -> dict[str, Component]: @property def connections(self) -> dict[str, Connection]: - """Returns a dict with all connections defined in the system. 
+ """Return a dict with all connections defined in the system. Returns ------- @@ -60,8 +60,7 @@ def connections(self) -> dict[str, Connection]: @property def units(self) -> dict[str, Unit]: - """Returns a combined dict with all units - from all components contained in the system. + """Return a combined dict with all units from all components contained in the system. Returns ------- @@ -76,8 +75,7 @@ def units(self) -> dict[str, Unit]: @property def connectors(self) -> dict[str, Connector]: - """Returns a combined dict with all connectors - from all components contained in the system. + """Return a combined dict with all connectors from all components contained in the system. Returns ------- @@ -92,8 +90,7 @@ def connectors(self) -> dict[str, Connector]: @property def variables(self) -> dict[str, ScalarVariable]: - """Returns a combined dict with all scalar variables - from all components contained in the system. + """Return a combined dict with all scalar variables from all components contained in the system. Returns ------- diff --git a/src/ospx/utils/dict.py b/src/ospx/utils/dict.py index e669fbd1..8959f518 100644 --- a/src/ospx/utils/dict.py +++ b/src/ospx/utils/dict.py @@ -21,7 +21,9 @@ def find_keys(dict_in: MutableMapping[Any, Any], pattern: str) -> list[str] | No def find_type_identifier_in_keys(dict_in: MutableMapping[Any, Any]) -> str | None: - """Find the first key name in dict that contains one of the following type identifier strings: + """Find the first type identifier in dict. + + Find. the first key name in dict that contains one of the following type identifier strings: [Integer|Real|Boolean|Enumeration|String|Unknown]. 
""" key_list: list[str] = ["Integer", "Real", "Boolean", "Enumeration", "String", "Unkown"] diff --git a/src/ospx/utils/zip.py b/src/ospx/utils/zip.py index 4183359a..322ce429 100644 --- a/src/ospx/utils/zip.py +++ b/src/ospx/utils/zip.py @@ -10,9 +10,9 @@ def read_file_content_from_zip(zip_file: Path, file_name: str) -> str | None: - """ - belongs to zip functions - read a single file. + """Read a single file. + + Belongs to zip functions """ file_handle, temp_name = mkstemp(dir=zip_file.parent) file_content = None @@ -31,9 +31,9 @@ def read_file_content_from_zip(zip_file: Path, file_name: str) -> str | None: def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> ZipFile | None: - """ - belongs to zip functions - rename files. + """Rename files. + + Belongs to zip functions. """ file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None @@ -60,9 +60,9 @@ def rename_file_in_zip(zip_file: Path, file_name: str, new_file_name: str) -> Zi def remove_files_from_zip(zip_file: Path, *file_names: str) -> ZipFile | None: - """ - belongs to zip functions - remove files. + """Remove files. + + Belongs to zip functions. """ file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None @@ -88,9 +88,9 @@ def remove_files_from_zip(zip_file: Path, *file_names: str) -> ZipFile | None: def add_file_content_to_zip(zip_file: Path, file_name: str, file_content: str) -> ZipFile | None: - """ - belongs to zip functions - add a single file and its ascii content. + """Add a single file and its ascii content. + + Belongs to zip functions. 
""" file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None @@ -112,9 +112,9 @@ def add_file_content_to_zip(zip_file: Path, file_name: str, file_content: str) - def substitute_text_in_zip( zip_file: Path, file_name_pattern: str = "", subst: tuple[str, str] = ("", "") ) -> ZipFile | None: - """ - belongs to zip functions - substitutes a given string in all files matching the passed file name pattern. + """Substitute a given string in all files matching the passed file name pattern. + + Belongs to zip functions. """ file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None @@ -144,9 +144,9 @@ def substitute_text_in_zip( def update_file_content_in_zip(zip_file: Path, file_name: str, file_content: str) -> ZipFile | None: - """ - belongs to zip functions - updates the ascii content of a single file. + """Update the ascii content of a single file. + + Belongs to zip functions. """ file_handle, temp_name = mkstemp(dir=zip_file.parent) updated_zip_file = None diff --git a/src/ospx/watch/watchCosim.py b/src/ospx/watch/watchCosim.py index 74307870..6956b6cd 100644 --- a/src/ospx/watch/watchCosim.py +++ b/src/ospx/watch/watchCosim.py @@ -30,7 +30,9 @@ class CosimWatcher: - """CosimWatcher allows to monitor a running simulation, + """Watcher to monitor a running simulation. + + CosimWatcher allows to monitor a running simulation, plot trends and dump simulation results into a resultDict file. """ From 2cb73f204f905649a46d65eda1cce6a093f3a33d Mon Sep 17 00:00:00 2001 From: Claas Date: Wed, 23 Oct 2024 21:01:38 +0200 Subject: [PATCH 17/32] .sourcery.yaml : added exclude for folder docs/build --- .sourcery.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.sourcery.yaml b/.sourcery.yaml index a4019dfb..9d935005 100644 --- a/.sourcery.yaml +++ b/.sourcery.yaml @@ -17,6 +17,7 @@ ignore: # A list of paths or files which Sourcery will ignore. 
- .tox - dist - __pycache__ + - docs/build - tests/library/spring_mass_damper/Damper - tests/library/spring_mass_damper/Mass - tests/library/spring_mass_damper/Spring @@ -52,7 +53,6 @@ rule_settings: metrics: quality_threshold: 20.0 - # github: # labels: [] # ignore_labels: From db5d5bf3aa0598141a985bf7149ec0add70ccbe4 Mon Sep 17 00:00:00 2001 From: Claas Date: Thu, 24 Oct 2024 12:17:05 +0200 Subject: [PATCH 18/32] replaced `UTC` with `timezone.utc` for compatibility with Python 3.10 --- src/ospx/fmi/fmu.py | 6 +++--- src/ospx/utils/plotting.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/ospx/fmi/fmu.py b/src/ospx/fmi/fmu.py index 8edec98c..c54de296 100644 --- a/src/ospx/fmi/fmu.py +++ b/src/ospx/fmi/fmu.py @@ -5,7 +5,7 @@ import platform import re from copy import deepcopy -from datetime import UTC, datetime +from datetime import datetime, timezone from pathlib import Path from shutil import copyfile from typing import TYPE_CHECKING, Any @@ -354,11 +354,11 @@ def _log_update_in_model_description( model_description["_xmlOpts"]["_rootAttributes"]["author"] = new_author # DateAndTime old_date = model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] - new_date = str(datetime.now(tz=UTC)) + new_date = str(datetime.now(tz=timezone.utc)) model_description["_xmlOpts"]["_rootAttributes"]["generationDateAndTime"] = new_date # Log modifications in attribute add_description_string = ( - f"\nmodified {datetime.now(tz=UTC).date()}:\n" + f"\nmodified {datetime.now(tz=timezone.utc).date()}:\n" f"\tauthor {old_author} to {new_author}\n" f"\tgenerationDateAndTime {old_date} to {new_date}\n" ) diff --git a/src/ospx/utils/plotting.py b/src/ospx/utils/plotting.py index 2c96a216..7b036e23 100644 --- a/src/ospx/utils/plotting.py +++ b/src/ospx/utils/plotting.py @@ -3,7 +3,7 @@ import os import re from collections.abc import MutableMapping -from datetime import UTC, datetime +from datetime import datetime, timezone from pathlib import 
Path import matplotlib.pyplot as plt @@ -30,7 +30,7 @@ def create_meta_dict(title: str) -> dict[str, str]: "Author": "VFW", "Description": title, "Copyright": "VFW", - "Creation Time": str(datetime.now(tz=UTC)), + "Creation Time": str(datetime.now(tz=timezone.utc)), "Software": "matplotlib", "Disclaimer": "", "Warning": "", From 8b90c783318dc717329cc6edcdd6d3328f77fde3 Mon Sep 17 00:00:00 2001 From: Claas Date: Thu, 24 Oct 2024 12:17:20 +0200 Subject: [PATCH 19/32] updated CHANGELOG.md --- CHANGELOG.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7dde3703..a4cf8558 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,11 @@ The changelog format is based on [Keep a Changelog](https://keepachangelog.com/e ## [Unreleased] +-/- + + +## [0.2.15] - 2024-10-24 + ### Changed * Changed from `pip`/`tox` to `uv` as package manager * README.md : Completely rewrote section "Development Setup", introducing `uv` as package manager. @@ -423,7 +428,8 @@ Maintenance Release * Added support for Python 3.10 -[unreleased]: https://github.com/dnv-opensource/ospx/compare/v0.2.14...HEAD +[unreleased]: https://github.com/dnv-opensource/ospx/compare/v0.2.15...HEAD +[0.2.15]: https://github.com/dnv-opensource/ospx/compare/v0.2.14...v0.2.15 [0.2.14]: https://github.com/dnv-opensource/ospx/compare/v0.2.13...v0.2.14 [0.2.13]: https://github.com/dnv-opensource/ospx/compare/v0.2.12...v0.2.13 [0.2.12]: https://github.com/dnv-opensource/ospx/compare/v0.2.11...v0.2.12 From 93d28924b42bbdaa6c44b4cf1030177f6b889ae0 Mon Sep 17 00:00:00 2001 From: Claas Date: Thu, 24 Oct 2024 12:17:34 +0200 Subject: [PATCH 20/32] bumped version number to 0.2.15 --- docs/source/conf.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index def4be32..b1158184 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,7 +26,7 @@ author = "Frank Lumpitzsch, Claas Rostock, 
Seung Hyeon Yoo" # The full version, including alpha/beta/rc tags -release = "0.2.14" +release = "0.2.15" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/pyproject.toml b/pyproject.toml index 2e823e37..312a9e4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ packages = [ [project] name = "ospx" -version = "0.2.14" +version = "0.2.15" description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." readme = "README.md" requires-python = ">= 3.10" From 980987009526e2ab84c92dc2d762089fb5fc782e Mon Sep 17 00:00:00 2001 From: Claas Date: Thu, 24 Oct 2024 12:50:35 +0200 Subject: [PATCH 21/32] improved typing of numpy arrays --- src/ospx/watch/watchCosim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ospx/watch/watchCosim.py b/src/ospx/watch/watchCosim.py index 6956b6cd..e87f3e81 100644 --- a/src/ospx/watch/watchCosim.py +++ b/src/ospx/watch/watchCosim.py @@ -207,7 +207,7 @@ def dump(self) -> None: result_dict = {} for header in list(data): - values: ndarray[Any, Any] = data[header].dropna().to_numpy() + values: ndarray[tuple[int], np.dtype[np.float64]] = data[header].dropna().to_numpy() _first_value: Any = values[0] _last_value: Any = values[-1] _mean: float | str = "None" From 62c71606183de4ae5c308efa87fb257021abe3f9 Mon Sep 17 00:00:00 2001 From: Claas Date: Thu, 24 Oct 2024 12:58:43 +0200 Subject: [PATCH 22/32] VS Code settings: Turned off automatic venv activation --- .vscode/settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index f0f4822b..35d8ed7b 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -26,7 +26,7 @@ "python.analysis.inlayHints.functionReturnTypes": false, "python.analysis.inlayHints.pytestParameters": true, "python.terminal.executeInFileDir": true, - 
"python.terminal.activateEnvironment": true, + "python.terminal.activateEnvironment": false, "python.terminal.activateEnvInCurrentTerminal": false, "python.analysis.packageIndexDepths": [ { From 00878466d8c41f9ab016913a47d0ce9c9ee1cab4 Mon Sep 17 00:00:00 2001 From: Claas Date: Tue, 5 Nov 2024 14:13:59 +0100 Subject: [PATCH 23/32] Sphinx documentation : Resolved issue that documentation of class members was generated twice and added extension to support Markdown-based diagrams created with Mermaid. --- CHANGELOG.md | 4 +++ README.md | 2 +- docs/source/_templates/custom-class.rst | 7 +++--- docs/source/_templates/custom-module.rst | 17 +++++++------ docs/source/api.rst | 24 +++++++++++++++++- docs/source/cli.rst | 18 ++++++------- docs/source/conf.py | 6 ++++- docs/source/ospx.component.rst | 7 ------ docs/source/ospx.connection.rst | 7 ------ docs/source/ospx.connector.rst | 7 ------ docs/source/ospx.fmi.experiment.rst | 7 ------ docs/source/ospx.fmi.fmu.rst | 7 ------ docs/source/ospx.fmi.rst | 18 ------------- docs/source/ospx.fmi.unit.rst | 7 ------ docs/source/ospx.fmi.variable.rst | 7 ------ docs/source/ospx.graph.rst | 7 ------ docs/source/ospx.importer.rst | 7 ------ docs/source/ospx.ospCaseBuilder.rst | 7 ------ docs/source/ospx.ospSimulationCase.rst | 7 ------ docs/source/ospx.rst | 32 ------------------------ docs/source/ospx.simulation.rst | 7 ------ docs/source/ospx.system.rst | 7 ------ docs/source/ospx.watch.rst | 15 ----------- docs/source/ospx.watch.watchCosim.rst | 7 ------ pyproject.toml | 1 + 25 files changed, 55 insertions(+), 187 deletions(-) delete mode 100644 docs/source/ospx.component.rst delete mode 100644 docs/source/ospx.connection.rst delete mode 100644 docs/source/ospx.connector.rst delete mode 100644 docs/source/ospx.fmi.experiment.rst delete mode 100644 docs/source/ospx.fmi.fmu.rst delete mode 100644 docs/source/ospx.fmi.rst delete mode 100644 docs/source/ospx.fmi.unit.rst delete mode 100644 docs/source/ospx.fmi.variable.rst delete 
mode 100644 docs/source/ospx.graph.rst delete mode 100644 docs/source/ospx.importer.rst delete mode 100644 docs/source/ospx.ospCaseBuilder.rst delete mode 100644 docs/source/ospx.ospSimulationCase.rst delete mode 100644 docs/source/ospx.rst delete mode 100644 docs/source/ospx.simulation.rst delete mode 100644 docs/source/ospx.system.rst delete mode 100644 docs/source/ospx.watch.rst delete mode 100644 docs/source/ospx.watch.watchCosim.rst diff --git a/CHANGELOG.md b/CHANGELOG.md index a4cf8558..140be80b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,7 +18,11 @@ The changelog format is based on [Keep a Changelog](https://keepachangelog.com/e * VS Code settings: Turned off automatic venv activation * Replaced black formatter with ruff formatter +### Solved +* Sphinx documentation: Resolved issue that documentation of class members was generated twice. + ### Added +* Sphinx documentation: Added extension to support Markdown-based diagrams created with Mermaid. * Added `mypy` as static type checker (in addition to `pyright`) ### GitHub workflows diff --git a/README.md b/README.md index 77b310ab..f1fa554b 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ _For more examples and usage, please refer to [ospx's documentation][ospx_docs]. ## File Format A caseDict is a file in dictIO dict file format used with farn. 
-_For a documentation of the caseDict file format, see [File Format](fileFormat.md) in [ospx's documentation][ospx_docs] on GitHub Pages._ +_For a documentation of the caseDict file format, see [File Format](fileFormat.rst) in [ospx's documentation][ospx_docs] on GitHub Pages._ _For a detailed documentation of the dictIO dict file format used by farn, see [dictIO's documentation][dictIO_docs] on GitHub Pages._ diff --git a/docs/source/_templates/custom-class.rst b/docs/source/_templates/custom-class.rst index 76bb02fa..a58baba0 100644 --- a/docs/source/_templates/custom-class.rst +++ b/docs/source/_templates/custom-class.rst @@ -1,4 +1,4 @@ -{{ fullname | escape | underline}} +{{ name | escape | underline}} .. currentmodule:: {{ module }} @@ -6,12 +6,11 @@ :members: :show-inheritance: + {% block methods %} .. automethod:: __init__ - {% if methods %} .. rubric:: {{ _('Methods') }} - .. autosummary:: {% for item in methods %} ~{{ name }}.{{ item }} @@ -19,10 +18,10 @@ {% endif %} {% endblock %} + {% block attributes %} {% if attributes %} .. rubric:: {{ _('Attributes') }} - .. autosummary:: {% for item in attributes %} ~{{ name }}.{{ item }} diff --git a/docs/source/_templates/custom-module.rst b/docs/source/_templates/custom-module.rst index 914d438e..16408531 100644 --- a/docs/source/_templates/custom-module.rst +++ b/docs/source/_templates/custom-module.rst @@ -1,35 +1,37 @@ {{ fullname | escape | underline}} .. automodule:: {{ fullname }} + :members: + {%- if classes %} + :exclude-members: {% for item in classes %}{{ item }}{{','}}{%- endfor %} + {% endif %} + {% block attributes %} {%- if attributes %} .. rubric:: {{ _('Module Attributes') }} - .. autosummary:: - :toctree: {% for item in attributes %} {{ item }} {%- endfor %} {% endif %} {%- endblock %} + {%- block functions %} {%- if functions %} .. rubric:: {{ _('Functions') }} - .. 
autosummary:: - :toctree: {% for item in functions %} {{ item }} {%- endfor %} {% endif %} {%- endblock %} + {%- block classes %} {%- if classes %} .. rubric:: {{ _('Classes') }} - .. autosummary:: :toctree: :template: custom-class.rst @@ -39,22 +41,21 @@ {% endif %} {%- endblock %} + {%- block exceptions %} {%- if exceptions %} .. rubric:: {{ _('Exceptions') }} - .. autosummary:: - :toctree: {% for item in exceptions %} {{ item }} {%- endfor %} {% endif %} {%- endblock %} + {%- block modules %} {%- if modules %} .. rubric:: Modules - .. autosummary:: :toctree: :template: custom-module.rst diff --git a/docs/source/api.rst b/docs/source/api.rst index 1a9fdf1b..6230c04b 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -1,9 +1,31 @@ API Reference ============= +Subpackages +----------- + +.. autosummary:: + :toctree: _autosummary + :template: custom-module.rst + :recursive: + + ospx.fmi + ospx.watch + +Submodules +---------- + .. autosummary:: :toctree: _autosummary :template: custom-module.rst :recursive: - ospx + ospx.component + ospx.connection + ospx.connector + ospx.graph + ospx.importer + ospx.ospCaseBuilder + ospx.ospSimulationCase + ospx.simulation + ospx.system diff --git a/docs/source/cli.rst b/docs/source/cli.rst index 4cc92ecb..29e14b15 100644 --- a/docs/source/cli.rst +++ b/docs/source/cli.rst @@ -1,9 +1,9 @@ -CLI Documentation -================= - -.. toctree:: - :maxdepth: 2 - - cli.ospCaseBuilder - cli.importSystemStructure - cli.watchCosim +CLI Documentation +================= + +.. toctree:: + :maxdepth: 3 + + cli.ospCaseBuilder + cli.importSystemStructure + cli.watchCosim diff --git a/docs/source/conf.py b/docs/source/conf.py index b1158184..79908fea 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -40,6 +40,7 @@ "matplotlib.sphinxext.plot_directive", "sphinx.ext.autosummary", "sphinx.ext.todo", + "sphinxcontrib.mermaid", ] # The file extensions of source files. 
@@ -65,10 +66,13 @@ html_static_path = ["_static"] html_logo = "_static/OSPx.svg" autodoc_default_options = { - "member-order": "bysource", + "member-order": "groupwise", "undoc-members": True, "exclude-members": "__weakref__", } autodoc_preserve_defaults = True myst_heading_anchors = 3 + +# add markdown mermaid support +myst_fence_as_directive = ["mermaid"] diff --git a/docs/source/ospx.component.rst b/docs/source/ospx.component.rst deleted file mode 100644 index 7afa3ce2..00000000 --- a/docs/source/ospx.component.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.component module -===================== - -.. automodule:: ospx.component - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.connection.rst b/docs/source/ospx.connection.rst deleted file mode 100644 index 92e6305a..00000000 --- a/docs/source/ospx.connection.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.connection module -====================== - -.. automodule:: ospx.connection - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.connector.rst b/docs/source/ospx.connector.rst deleted file mode 100644 index dc2e89b0..00000000 --- a/docs/source/ospx.connector.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.connector module -===================== - -.. automodule:: ospx.connector - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.fmi.experiment.rst b/docs/source/ospx.fmi.experiment.rst deleted file mode 100644 index 7df24978..00000000 --- a/docs/source/ospx.fmi.experiment.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.fmi.experiment module -========================== - -.. automodule:: ospx.fmi.experiment - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.fmi.fmu.rst b/docs/source/ospx.fmi.fmu.rst deleted file mode 100644 index c6749a11..00000000 --- a/docs/source/ospx.fmi.fmu.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.fmi.fmu module -=================== - -.. 
automodule:: ospx.fmi.fmu - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.fmi.rst b/docs/source/ospx.fmi.rst deleted file mode 100644 index 1e49c726..00000000 --- a/docs/source/ospx.fmi.rst +++ /dev/null @@ -1,18 +0,0 @@ -ospx.fmi package -================ - -.. automodule:: ospx.fmi - :members: - :undoc-members: - :show-inheritance: - -Submodules ----------- - -.. toctree:: - :maxdepth: 4 - - ospx.fmi.experiment - ospx.fmi.fmu - ospx.fmi.unit - ospx.fmi.variable diff --git a/docs/source/ospx.fmi.unit.rst b/docs/source/ospx.fmi.unit.rst deleted file mode 100644 index f1f82131..00000000 --- a/docs/source/ospx.fmi.unit.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.fmi.unit module -==================== - -.. automodule:: ospx.fmi.unit - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.fmi.variable.rst b/docs/source/ospx.fmi.variable.rst deleted file mode 100644 index 67e8a34b..00000000 --- a/docs/source/ospx.fmi.variable.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.fmi.variable module -======================== - -.. automodule:: ospx.fmi.variable - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.graph.rst b/docs/source/ospx.graph.rst deleted file mode 100644 index f86c5c83..00000000 --- a/docs/source/ospx.graph.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.graph module -================= - -.. automodule:: ospx.graph - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.importer.rst b/docs/source/ospx.importer.rst deleted file mode 100644 index 37687a7f..00000000 --- a/docs/source/ospx.importer.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.importer module -==================== - -.. 
automodule:: ospx.importer - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.ospCaseBuilder.rst b/docs/source/ospx.ospCaseBuilder.rst deleted file mode 100644 index bd799959..00000000 --- a/docs/source/ospx.ospCaseBuilder.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.ospCaseBuilder module -========================== - -.. automodule:: ospx.ospCaseBuilder - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.ospSimulationCase.rst b/docs/source/ospx.ospSimulationCase.rst deleted file mode 100644 index 3fa91619..00000000 --- a/docs/source/ospx.ospSimulationCase.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.ospSimulationCase module -============================= - -.. automodule:: ospx.ospSimulationCase - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.rst b/docs/source/ospx.rst deleted file mode 100644 index db7f66af..00000000 --- a/docs/source/ospx.rst +++ /dev/null @@ -1,32 +0,0 @@ -ospx package -============ - -.. automodule:: ospx - :members: - :undoc-members: - :show-inheritance: - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - ospx.fmi - ospx.watch - -Submodules ----------- - -.. toctree:: - :maxdepth: 4 - - ospx.component - ospx.connection - ospx.connector - ospx.graph - ospx.importer - ospx.ospCaseBuilder - ospx.ospSimulationCase - ospx.simulation - ospx.system diff --git a/docs/source/ospx.simulation.rst b/docs/source/ospx.simulation.rst deleted file mode 100644 index 08c7e1ce..00000000 --- a/docs/source/ospx.simulation.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.simulation module -====================== - -.. automodule:: ospx.simulation - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.system.rst b/docs/source/ospx.system.rst deleted file mode 100644 index 925333b9..00000000 --- a/docs/source/ospx.system.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.system module -================== - -.. 
automodule:: ospx.system - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/source/ospx.watch.rst b/docs/source/ospx.watch.rst deleted file mode 100644 index d89fc285..00000000 --- a/docs/source/ospx.watch.rst +++ /dev/null @@ -1,15 +0,0 @@ -ospx.watch package -================== - -.. automodule:: ospx.watch - :members: - :undoc-members: - :show-inheritance: - -Submodules ----------- - -.. toctree:: - :maxdepth: 4 - - ospx.watch.watchCosim diff --git a/docs/source/ospx.watch.watchCosim.rst b/docs/source/ospx.watch.watchCosim.rst deleted file mode 100644 index 7220895f..00000000 --- a/docs/source/ospx.watch.watchCosim.rst +++ /dev/null @@ -1,7 +0,0 @@ -ospx.watch.watchCosim module -============================ - -.. automodule:: ospx.watch.watchCosim - :members: - :undoc-members: - :show-inheritance: diff --git a/pyproject.toml b/pyproject.toml index 312a9e4c..671d925b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,6 +80,7 @@ dev-dependencies = [ "Sphinx>=8.0", "sphinx-argparse-cli>=1.17", "sphinx-autodoc-typehints>=2.2", + "sphinxcontrib-mermaid>=1.0.0", "myst-parser>=4.0", "furo>=2024.8", "nbconvert>=7.16", From 51d48d083ce5e7c8bfa7c46520e711fff35f45d6 Mon Sep 17 00:00:00 2001 From: Claas Date: Sun, 10 Nov 2024 14:16:22 +0100 Subject: [PATCH 24/32] .github/workflows/publish_release.yml : Run publishing job from main (non-reusable) workflow. Remove the two reusable workflows _publish_package.yml and _publish_package_test.yml. Background for this change is a limitation recently introduced on pypi, which does not allow anylonger to run the GitHub action 'pypa/gh-action-pypi-publish' from a reusable workflow. 
The code hence needed to be moved upwards, from the reusable workflow _publish_package.yml into the (non-reusable) workflow publish_release.yml See https://github.com/marketplace/actions/pypi-publish -> Note under "Trusted Publishing" --- .github/workflows/_publish_package.yml | 17 ----------------- .github/workflows/publish_release.yml | 14 +++++++++++++- 2 files changed, 13 insertions(+), 18 deletions(-) delete mode 100644 .github/workflows/_publish_package.yml diff --git a/.github/workflows/_publish_package.yml b/.github/workflows/_publish_package.yml deleted file mode 100644 index 0779a17c..00000000 --- a/.github/workflows/_publish_package.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Publish Package to pypi - -on: workflow_call - -jobs: - publish: - name: Publish package - runs-on: ubuntu-latest - environment: release - permissions: - id-token: write - steps: - - uses: actions/download-artifact@v4 - with: - name: artifact - path: dist - - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/publish_release.yml b/.github/workflows/publish_release.yml index 9bf6d7c7..faff0983 100644 --- a/.github/workflows/publish_release.yml +++ b/.github/workflows/publish_release.yml @@ -10,9 +10,21 @@ jobs: build_package: uses: ./.github/workflows/_build_package.yml publish_package: + name: Publish package needs: - build_package - uses: ./.github/workflows/_publish_package.yml + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + steps: + - uses: actions/download-artifact@v4 + with: + name: artifact + path: dist + - uses: pypa/gh-action-pypi-publish@release/v1 + # with: # Uncomment this line to publish to testpypi + # repository-url: https://test.pypi.org/legacy/ # Uncomment this line to publish to testpypi merge_into_release: uses: ./.github/workflows/_merge_into_release.yml secrets: From 9f7a0a8006067f9e9afb502e889c2ec2b090c558 Mon Sep 17 00:00:00 2001 From: Claas Date: Sun, 10 Nov 2024 22:03:16 +0100 Subject: [PATCH 25/32] 
dependencies: updated to dictIO>=0.4.0b1 (from dictIO>=0.3.4) --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 671d925b..d22d84dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ dependencies = [ "pandas>=2.2", "matplotlib>=3.9", "graphviz>=0.20", - "dictIO>=0.3.4", + "dictIO>=0.4.0b1", ] [project.urls] @@ -90,6 +90,7 @@ dev-dependencies = [ "types-lxml>=2024.4", ] native-tls = true +# prerelease = "explicit" [project.scripts] From a69d1c09538ff4d0a1c221f12522b7236313ad21 Mon Sep 17 00:00:00 2001 From: Claas Date: Sun, 10 Nov 2024 22:25:03 +0100 Subject: [PATCH 26/32] ruff.toml : Allow __init__.py modules to re-export imported names --- ruff.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ruff.toml b/ruff.toml index ab5d823e..88d7a301 100644 --- a/ruff.toml +++ b/ruff.toml @@ -103,8 +103,9 @@ raises-require-match-for = [ [lint.per-file-ignores] # `__init__.py` specific ignores "__init__.py" = [ - "F401", # {name} imported but unused (NOTE: ignored as imports in `__init__.py` files are almost never used inside the module, but are intended for namespaces) - "I001", # Import block is un-sorted or un-formatted + "F401", # {name} imported but unused (NOTE: ignored as imports in `__init__.py` files are almost never used inside the module, but are intended for namespaces) + "I001", # Import block is un-sorted or un-formatted + "PLC0414", # Import alias does not rename original package ] # `tests` specific ignores "tests/**/*" = [ From 0bbbc7a67a539e585eb4f83c5a7af56b7c8241fd Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 15:52:53 +0100 Subject: [PATCH 27/32] code adapted to work with dictIO v0.4.0b6 --- pyproject.toml | 2 +- src/ospx/component.py | 2 +- src/ospx/fmi/fmu.py | 80 ++++++++++++++++++---------------- src/ospx/fmi/variable.py | 17 +++++--- src/ospx/ospCaseBuilder.py | 5 ++- src/ospx/ospSimulationCase.py | 6 +-- 
tests/test_dicts/test_fmu.fmu | Bin 1227 -> 1227 bytes 7 files changed, 62 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d22d84dd..09222a4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ dependencies = [ "pandas>=2.2", "matplotlib>=3.9", "graphviz>=0.20", - "dictIO>=0.4.0b1", + "dictIO>=0.4.0b2", ] [project.urls] diff --git a/src/ospx/component.py b/src/ospx/component.py index 52ce9745..040f67b6 100644 --- a/src/ospx/component.py +++ b/src/ospx/component.py @@ -270,7 +270,7 @@ def write_osp_model_description_xml(self) -> None: "_rootTag": "ospModelDescription", } - DictWriter.write(osp_model_description, osp_model_description_file) + DictWriter.write(source_dict=osp_model_description, target_file=osp_model_description_file) def _clean(self, file_to_remove: str | Path) -> None: """Clean up single file.""" diff --git a/src/ospx/fmi/fmu.py b/src/ospx/fmi/fmu.py index c54de296..ec52b4a3 100644 --- a/src/ospx/fmi/fmu.py +++ b/src/ospx/fmi/fmu.py @@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any from zipfile import ZipFile -from dictIO import CppDict, XmlFormatter, XmlParser +from dictIO import SDict, XmlFormatter, XmlParser from dictIO.utils.counter import BorgCounter from ospx.fmi import BaseUnit, DisplayUnit, Experiment, ScalarVariable, Unit @@ -45,11 +45,11 @@ def __init__(self, file: str | os.PathLike[str]) -> None: raise FileNotFoundError(file) self.file: Path = file - self.model_description: CppDict = self._read_model_description() + self.model_description: SDict[str, Any] = self._read_model_description() self.counter = BorgCounter() - def _read_model_description(self) -> CppDict: - model_description = CppDict(Path("modelDescription.xml")) + def _read_model_description(self) -> SDict[str, Any]: + model_description: SDict[str, Any] = SDict(Path("modelDescription.xml")) xml_parser = XmlParser() logger.info(f"{self.file.name}: read modelDescription.xml") @@ -65,7 +65,7 @@ def _read_model_description(self) 
-> CppDict: def _write_model_description( self, - model_description: CppDict | None = None, + model_description: SDict[str, Any] | None = None, *, write_inside_fmu: bool = False, ) -> None: @@ -240,7 +240,7 @@ def copy(self, new_name: str) -> FMU: existing_file_name = self.file.stem if new_name == existing_file_name: logger.error(f"{self.file.name} copy: new name {new_name} is identical with existing name. copy() aborted.") - new_model_description: CppDict = deepcopy(self.model_description) + new_model_description: SDict[str, Any] = deepcopy(self.model_description) new_file = self.file.parent.absolute() / f"{new_name}.fmu" # Copy FMU @@ -261,12 +261,12 @@ def copy(self, new_name: str) -> FMU: # Rename in modelDescription.xml new_model_description["_xmlOpts"]["_rootAttributes"]["modelName"] = new_name - # Rename in modelDescription.xml - # (STC requires consistency between and ) - co_simulation: MutableMapping[Any, Any] = new_model_description[ - find_key(new_model_description, "CoSimulation$") - ] - co_simulation["_attributes"]["modelIdentifier"] = new_name + if _key := find_key( + dict_in=new_model_description, + pattern="CoSimulation$", + ): + co_simulation: MutableMapping[str, Any] = new_model_description[_key] + co_simulation["_attributes"]["modelIdentifier"] = new_name # Log the update in modelDescription.xml self._log_update_in_model_description(new_model_description) @@ -310,40 +310,46 @@ def _modify_start_values(self, variables_with_start_values: dict[str, ScalarVari """Modify the start values of variables inside the FMUs modelDescription.xml.""" logger.info(f"{self.file.name}: update start values of variables in modelDescription.xml") # 2 - model_variables: MutableMapping[Any, Any] = self.model_description[ - find_key(self.model_description, "ModelVariables$") - ] - names_of_variables_with_start_values: list[str] = [ variable.name for _, variable in variables_with_start_values.items() ] - for model_variable_key, model_variable_properties in 
model_variables.items(): - model_variable_name: str = model_variable_properties["_attributes"]["name"] - - if model_variable_name in names_of_variables_with_start_values: - variable_with_start_values = variables_with_start_values[model_variable_name] - type_identifier = find_type_identifier_in_keys(model_variable_properties) - type_key = find_key(model_variable_properties, f"{type_identifier}$") - - logger.info( - f"{self.file.name}: update start values for variable {model_variable_name}:\n" - f"\tstart:\t\t{variable_with_start_values.start}\n" - f"\tcausality:\t {variable_with_start_values.causality}\n" - f"\tvariability:\t{variable_with_start_values.variability}" - ) - - model_variables[model_variable_key][type_key]["_attributes"]["start"] = variable_with_start_values.start - model_variables[model_variable_key]["_attributes"]["causality"] = variable_with_start_values.causality - model_variables[model_variable_key]["_attributes"]["variability"] = ( - variable_with_start_values.variability - ) + if _key := find_key( + dict_in=self.model_description, + pattern="ModelVariables$", + ): + model_variables: MutableMapping[Any, Any] = self.model_description[_key] + + for model_variable_key, model_variable_properties in model_variables.items(): + model_variable_name: str = model_variable_properties["_attributes"]["name"] + + if model_variable_name in names_of_variables_with_start_values: + variable_with_start_values = variables_with_start_values[model_variable_name] + type_identifier = find_type_identifier_in_keys(model_variable_properties) + type_key = find_key(model_variable_properties, f"{type_identifier}$") + + logger.info( + f"{self.file.name}: update start values for variable {model_variable_name}:\n" + f"\tstart:\t\t{variable_with_start_values.start}\n" + f"\tcausality:\t {variable_with_start_values.causality}\n" + f"\tvariability:\t{variable_with_start_values.variability}" + ) + + model_variables[model_variable_key][type_key]["_attributes"]["start"] = ( + 
variable_with_start_values.start + ) + model_variables[model_variable_key]["_attributes"]["causality"] = ( + variable_with_start_values.causality + ) + model_variables[model_variable_key]["_attributes"]["variability"] = ( + variable_with_start_values.variability + ) self._log_update_in_model_description() def _log_update_in_model_description( self, - model_description: CppDict | None = None, + model_description: SDict[str, Any] | None = None, ) -> None: model_description = model_description or self.model_description diff --git a/src/ospx/fmi/variable.py b/src/ospx/fmi/variable.py index 3803b465..ca0a6d94 100644 --- a/src/ospx/fmi/variable.py +++ b/src/ospx/fmi/variable.py @@ -1,9 +1,12 @@ # ruff: noqa: PYI041 import logging from collections.abc import Iterable, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any -from dictIO import Formatter, Parser +from dictIO import Formatter, NativeFormatter, Parser + +if TYPE_CHECKING: + from dictIO.types import TSingleValue __ALL__ = ["ScalarVariable", "get_fmi_data_type"] @@ -222,7 +225,7 @@ def _cast_to_fmi_data_type( Parameters ---------- - arg : Union[int, float, bool, str, Sequence[Any]] + arg : int | float | bool | str | Sequence[Any] The argument to be casted fmi_data_type : str The fmi data type the argument shall be casted to.\n @@ -242,17 +245,19 @@ def _cast_to_fmi_data_type( ) return None # parse if arg is string - parsed_value: int | float | bool - parsed_value = Parser().parse_type(arg) if isinstance(arg, str) else arg + parsed_value: TSingleValue + parsed_value = Parser().parse_value(arg) if isinstance(arg, str) else arg # cast to int / float / bool if fmi_data_type == "Integer": + assert parsed_value is not None return int(parsed_value) if fmi_data_type == "Real": + assert parsed_value is not None return float(parsed_value) return bool(parsed_value) if fmi_data_type == "String": # format as string - return Formatter().format_dict(arg) if isinstance(arg, Sequence) else 
Formatter().format_type(arg) + return NativeFormatter().format_dict(arg) if isinstance(arg, Sequence) else Formatter().format_value(arg) if fmi_data_type == "Enumeration": # cast to list return list(arg) if isinstance(arg, Iterable) else [arg] diff --git a/src/ospx/ospCaseBuilder.py b/src/ospx/ospCaseBuilder.py index 7ed5748e..b7849030 100644 --- a/src/ospx/ospCaseBuilder.py +++ b/src/ospx/ospCaseBuilder.py @@ -1,8 +1,9 @@ import logging import os from pathlib import Path +from typing import Any -from dictIO import CppDict, DictReader +from dictIO import DictReader, SDict from ospx import Graph, OspSimulationCase @@ -64,7 +65,7 @@ def build( logger.info(f"reading {case_dict_file}") # 0 - case_dict: CppDict = DictReader.read(case_dict_file, comments=False) + case_dict: SDict[str, Any] = DictReader.read(case_dict_file, comments=False) case = OspSimulationCase(case_dict) try: diff --git a/src/ospx/ospSimulationCase.py b/src/ospx/ospSimulationCase.py index 4d93cc13..bedc23de 100644 --- a/src/ospx/ospSimulationCase.py +++ b/src/ospx/ospSimulationCase.py @@ -4,7 +4,7 @@ from shutil import copy2 from typing import Any -from dictIO import CppDict, DictWriter, XmlFormatter +from dictIO import DictWriter, SDict, XmlFormatter from dictIO.utils.counter import BorgCounter from dictIO.utils.path import relative_path @@ -21,10 +21,10 @@ class OspSimulationCase: def __init__( self, - case_dict: CppDict, + case_dict: SDict[str, Any], ) -> None: self.counter = BorgCounter() - self.case_dict: CppDict = case_dict + self.case_dict: SDict[str, Any] = case_dict self.case_folder: Path = case_dict.source_file.resolve().parent if case_dict.source_file else Path.cwd() self.system_structure: System diff --git a/tests/test_dicts/test_fmu.fmu b/tests/test_dicts/test_fmu.fmu index 7c593b389da2abb4f38b0373a9aff8f58c3d966b..573b6122348cb5b52d19b4e96d884c2c539802f1 100644 GIT binary patch delta 30 kcmX@jd76_qz?+#xgn@y9gQ2!Adn0cV3p0?O+{EGu0C3R<*#H0l delta 30 
kcmX@jd76_qz?+#xgn@y9gTbvfbR%yO3p0?O+{EGu0BUy!Jpcdz From 4ce34c24c6eb48ae525f2cb6211116e5bcf0f656 Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 15:54:19 +0100 Subject: [PATCH 28/32] bumped version number to 0.3.0b1 --- docs/source/conf.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 79908fea..951900b7 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,7 +26,7 @@ author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" # The full version, including alpha/beta/rc tags -release = "0.2.15" +release = "0.3.0b1" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/pyproject.toml b/pyproject.toml index 09222a4e..1e31ee5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ packages = [ [project] name = "ospx" -version = "0.2.15" +version = "0.3.0b1" description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." 
readme = "README.md" requires-python = ">= 3.10" From eda78e6910fe3102b91d91e91e3b23e22219fd3d Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 17:03:49 +0100 Subject: [PATCH 29/32] bumped version number to 0.3.0b3 --- docs/source/conf.py | 2 +- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 951900b7..bec2ee59 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,7 +26,7 @@ author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" # The full version, including alpha/beta/rc tags -release = "0.3.0b1" +release = "0.3.0b3" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/pyproject.toml b/pyproject.toml index 1e31ee5a..594220f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ packages = [ [project] name = "ospx" -version = "0.3.0b1" +version = "0.3.0b3" description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." 
readme = "README.md" requires-python = ">= 3.10" @@ -57,7 +57,7 @@ dependencies = [ "pandas>=2.2", "matplotlib>=3.9", "graphviz>=0.20", - "dictIO>=0.4.0b2", + "dictIO>=0.4.0b9", ] [project.urls] From 8f1e20ccca482755bff5a1978803333e28095824 Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 18:24:05 +0100 Subject: [PATCH 30/32] bumped version number to 0.3.0b4 --- docs/source/conf.py | 2 +- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index bec2ee59..466744ae 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,7 +26,7 @@ author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" # The full version, including alpha/beta/rc tags -release = "0.3.0b3" +release = "0.3.0b4" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/pyproject.toml b/pyproject.toml index 594220f0..e2353e9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ packages = [ [project] name = "ospx" -version = "0.3.0b3" +version = "0.3.0b4" description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." 
readme = "README.md" requires-python = ">= 3.10" @@ -57,7 +57,7 @@ dependencies = [ "pandas>=2.2", "matplotlib>=3.9", "graphviz>=0.20", - "dictIO>=0.4.0b9", + "dictIO>=0.4.0b10", ] [project.urls] From e4086859f7d6e359395ec94da7e0acd762204fc2 Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 18:31:21 +0100 Subject: [PATCH 31/32] __init__.py's : re-exported imported names --- src/ospx/__init__.py | 20 ++++++++++---------- src/ospx/fmi/__init__.py | 14 +++++++------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/ospx/__init__.py b/src/ospx/__init__.py index 9522e3c2..b5600751 100644 --- a/src/ospx/__init__.py +++ b/src/ospx/__init__.py @@ -1,12 +1,12 @@ -from ospx.simulation import Simulation -from ospx.connector import Connector +from ospx.simulation import Simulation as Simulation +from ospx.connector import Connector as Connector from ospx.connection import ( - Endpoint, - Connection, + Endpoint as Endpoint, + Connection as Connection, ) -from ospx.component import Component -from ospx.system import System -from ospx.ospSimulationCase import OspSimulationCase -from ospx.graph import Graph -from ospx.ospCaseBuilder import OspCaseBuilder -from ospx.importer import OspSystemStructureImporter +from ospx.component import Component as Component +from ospx.system import System as System +from ospx.ospSimulationCase import OspSimulationCase as OspSimulationCase +from ospx.graph import Graph as Graph +from ospx.ospCaseBuilder import OspCaseBuilder as OspCaseBuilder +from ospx.importer import OspSystemStructureImporter as OspSystemStructureImporter diff --git a/src/ospx/fmi/__init__.py b/src/ospx/fmi/__init__.py index e9a4e8f3..adc6c3d5 100644 --- a/src/ospx/fmi/__init__.py +++ b/src/ospx/fmi/__init__.py @@ -1,11 +1,11 @@ -from ospx.fmi.experiment import Experiment +from ospx.fmi.experiment import Experiment as Experiment from ospx.fmi.unit import ( - Unit, - BaseUnit, - DisplayUnit, + Unit as Unit, + BaseUnit as BaseUnit, + DisplayUnit as 
DisplayUnit, ) from ospx.fmi.variable import ( - ScalarVariable, - get_fmi_data_type, + ScalarVariable as ScalarVariable, + get_fmi_data_type as get_fmi_data_type, ) -from ospx.fmi.fmu import FMU +from ospx.fmi.fmu import FMU as FMU From 77896cf780b32666f4ccb076f9a7fa39fa5fd571 Mon Sep 17 00:00:00 2001 From: Claas Date: Mon, 11 Nov 2024 18:32:09 +0100 Subject: [PATCH 32/32] bumped version number to 0.3.0b5 --- docs/source/conf.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 466744ae..3f4b5d65 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -26,7 +26,7 @@ author = "Frank Lumpitzsch, Claas Rostock, Seung Hyeon Yoo" # The full version, including alpha/beta/rc tags -release = "0.3.0b4" +release = "0.3.0b5" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/pyproject.toml b/pyproject.toml index e2353e9f..833f346f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ packages = [ [project] name = "ospx" -version = "0.3.0b4" +version = "0.3.0b5" description = "Extension package to farn, adding support to build OSP simulation cases using FMUs." readme = "README.md" requires-python = ">= 3.10"