From 162509d57cf24993dda002f3169bc0c39f12f02b Mon Sep 17 00:00:00 2001 From: JakobEliasWagner Date: Thu, 14 Mar 2024 18:24:57 +0000 Subject: [PATCH] deploy: 85ee6450932d2b974dac243ed402a6b7153002f4 --- .buildinfo | 4 + .nojekyll | 0 _sources/coverage.rst.txt | 4 + _sources/examples.rst.txt | 3 + _sources/index.rst.txt | 31 + _sources/modules.rst.txt | 7 + _sources/nos.benchmarks.rst.txt | 29 + _sources/nos.data.rst.txt | 21 + _sources/nos.metrics.rst.txt | 37 + _sources/nos.networks.rst.txt | 21 + _sources/nos.operators.rst.txt | 37 + _sources/nos.rst.txt | 24 + _sources/nos.trainers.rst.txt | 29 + _sources/nos.transforms.rst.txt | 21 + _static/alabaster.css | 708 ++++++++++++++ _static/basic.css | 925 ++++++++++++++++++ _static/codecov/coverage_html.js | 624 ++++++++++++ .../d_040e78c194e497af_scaling_py.html | 111 +++ ...93093ff38057b02c_deep_dot_operator_py.html | 155 +++ ...3093ff38057b02c_deep_root_operator_py.html | 167 ++++ ...057b02c_mean_stack_neural_operator_py.html | 171 ++++ .../d_99aac0089c5f15d8_error_metrics_py.html | 150 +++ .../codecov/d_99aac0089c5f15d8_metric_py.html | 133 +++ ..._99aac0089c5f15d8_operator_metrics_py.html | 139 +++ .../d_a94113ed293c43ea_benchmark_py.html | 125 +++ ...a94113ed293c43ea_transmission_loss_py.html | 112 +++ ..._db4399fe0ac4c92a_transmssion_loss_py.html | 272 +++++ .../d_dfcfe80011fec8d7_residual_py.html | 144 +++ .../d_e3bc015ca2131802_average_metric_py.html | 124 +++ .../d_e3bc015ca2131802_trainer_py.html | 234 +++++ _static/codecov/favicon_32.png | Bin 0 -> 1732 bytes _static/codecov/index.html | 186 ++++ _static/codecov/keybd_closed.png | Bin 0 -> 9004 bytes _static/codecov/keybd_open.png | Bin 0 -> 9003 bytes _static/codecov/status.json | 1 + _static/codecov/style.css | 309 ++++++ _static/custom.css | 1 + _static/doctools.js | 156 +++ _static/documentation_options.js | 13 + _static/file.png | Bin 0 -> 286 bytes _static/language_data.js | 199 ++++ _static/minus.png | Bin 0 -> 90 bytes _static/plus.png | Bin 0 -> 90 bytes _static/pygments.css | 84 ++ _static/searchtools.js | 574 +++++++++++ _static/sphinx_highlight.js | 154 +++ coverage.html | 112 +++ examples.html | 107 ++ genindex.html | 489 +++++++++ index.html | 133 +++ modules.html | 171 ++++ nos.benchmarks.html | 170 ++++ nos.data.html | 165 ++++ nos.html | 283 ++++++ nos.metrics.html | 250 +++++ nos.networks.html | 205 ++++ nos.operators.html | 297 ++++++ nos.trainers.html | 128 +++ nos.transforms.html | 165 ++++ objects.inv | Bin 0 -> 912 bytes py-modindex.html | 204 ++++ search.html | 123 +++ searchindex.js | 1 + 63 files changed, 9242 insertions(+) create mode 100644 .buildinfo create mode 100644 .nojekyll create mode 100644 _sources/coverage.rst.txt create mode 100644 _sources/examples.rst.txt create mode 100644 _sources/index.rst.txt create mode 100644 _sources/modules.rst.txt create mode 100644 _sources/nos.benchmarks.rst.txt create mode 100644 _sources/nos.data.rst.txt create mode 100644 _sources/nos.metrics.rst.txt create mode 100644 _sources/nos.networks.rst.txt create mode 100644 _sources/nos.operators.rst.txt create mode 100644 _sources/nos.rst.txt create mode 100644 _sources/nos.trainers.rst.txt create mode 100644 _sources/nos.transforms.rst.txt create mode 100644 _static/alabaster.css create mode 100644 _static/basic.css create mode 100644 _static/codecov/coverage_html.js create mode 100644 _static/codecov/d_040e78c194e497af_scaling_py.html create mode 100644 _static/codecov/d_93093ff38057b02c_deep_dot_operator_py.html create mode 100644 
_static/codecov/d_93093ff38057b02c_deep_root_operator_py.html create mode 100644 _static/codecov/d_93093ff38057b02c_mean_stack_neural_operator_py.html create mode 100644 _static/codecov/d_99aac0089c5f15d8_error_metrics_py.html create mode 100644 _static/codecov/d_99aac0089c5f15d8_metric_py.html create mode 100644 _static/codecov/d_99aac0089c5f15d8_operator_metrics_py.html create mode 100644 _static/codecov/d_a94113ed293c43ea_benchmark_py.html create mode 100644 _static/codecov/d_a94113ed293c43ea_transmission_loss_py.html create mode 100644 _static/codecov/d_db4399fe0ac4c92a_transmssion_loss_py.html create mode 100644 _static/codecov/d_dfcfe80011fec8d7_residual_py.html create mode 100644 _static/codecov/d_e3bc015ca2131802_average_metric_py.html create mode 100644 _static/codecov/d_e3bc015ca2131802_trainer_py.html create mode 100644 _static/codecov/favicon_32.png create mode 100644 _static/codecov/index.html create mode 100644 _static/codecov/keybd_closed.png create mode 100644 _static/codecov/keybd_open.png create mode 100644 _static/codecov/status.json create mode 100644 _static/codecov/style.css create mode 100644 _static/custom.css create mode 100644 _static/doctools.js create mode 100644 _static/documentation_options.js create mode 100644 _static/file.png create mode 100644 _static/language_data.js create mode 100644 _static/minus.png create mode 100644 _static/plus.png create mode 100644 _static/pygments.css create mode 100644 _static/searchtools.js create mode 100644 _static/sphinx_highlight.js create mode 100644 coverage.html create mode 100644 examples.html create mode 100644 genindex.html create mode 100644 index.html create mode 100644 modules.html create mode 100644 nos.benchmarks.html create mode 100644 nos.data.html create mode 100644 nos.html create mode 100644 nos.metrics.html create mode 100644 nos.networks.html create mode 100644 nos.operators.html create mode 100644 nos.trainers.html create mode 100644 nos.transforms.html create mode 100644 objects.inv create mode 100644 py-modindex.html create mode 100644 search.html create mode 100644 searchindex.js diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 00000000..fc847633 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 343d47b1cca2da89436b36abcb430701 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/_sources/coverage.rst.txt b/_sources/coverage.rst.txt new file mode 100644 index 00000000..412a2b1f --- /dev/null +++ b/_sources/coverage.rst.txt @@ -0,0 +1,4 @@ +Code Coverage +============= + + For a detailed report on the code coverage, `click here <_static/codecov/index.html>`_. diff --git a/_sources/examples.rst.txt b/_sources/examples.rst.txt new file mode 100644 index 00000000..b90c0e1c --- /dev/null +++ b/_sources/examples.rst.txt @@ -0,0 +1,3 @@ +======== +Examples +======== diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 00000000..7cd2a19c --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,31 @@ +=========================================== +Neural Operators for Helmholtz Equation +=========================================== + +Welcome to the documentation for the Neural Operators for Helmholtz Equation project!
This project focuses on the application of advanced neural operator techniques to solve the Helmholtz equation in various parameterized geometries. The approach integrates deep learning with physical modeling of domains. + +Contents +======== + +.. toctree:: + :maxdepth: 2 + + coverage + modules + examples + +Introduction +============ + +The Helmholtz equation is a fundamental partial differential equation in physics, particularly in the fields of acoustics, electromagnetics, and quantum mechanics. In this project, we develop and utilize neural operators, a form of deep learning model, to solve the Helmholtz equation across different parameterized geometries. This approach aims to overcome the limitations of traditional numerical methods, offering speed and flexibility for complex geometries. + + +API Reference +============= + +The :doc:`modules` section offers detailed descriptions of the functions, classes, and methods available in this project. + +Examples +======== + +Explore practical applications and see the neural operators in action in the :doc:`examples` section. diff --git a/_sources/modules.rst.txt b/_sources/modules.rst.txt new file mode 100644 index 00000000..977c47e5 --- /dev/null +++ b/_sources/modules.rst.txt @@ -0,0 +1,7 @@ +nos +=== + +.. toctree:: + :maxdepth: 4 + + nos diff --git a/_sources/nos.benchmarks.rst.txt b/_sources/nos.benchmarks.rst.txt new file mode 100644 index 00000000..63cc3734 --- /dev/null +++ b/_sources/nos.benchmarks.rst.txt @@ -0,0 +1,29 @@ +nos.benchmarks package +====================== + +Submodules +---------- + +nos.benchmarks.benchmark module +------------------------------- + +.. automodule:: nos.benchmarks.benchmark + :members: + :undoc-members: + :show-inheritance: + +nos.benchmarks.transmission\_loss module +---------------------------------------- + +.. automodule:: nos.benchmarks.transmission_loss + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: nos.benchmarks + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.data.rst.txt b/_sources/nos.data.rst.txt new file mode 100644 index 00000000..fdc24b6a --- /dev/null +++ b/_sources/nos.data.rst.txt @@ -0,0 +1,21 @@ +nos.data package +================ + +Submodules +---------- + +nos.data.transmssion\_loss module +--------------------------------- + +.. automodule:: nos.data.transmssion_loss + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: nos.data + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.metrics.rst.txt b/_sources/nos.metrics.rst.txt new file mode 100644 index 00000000..3819268b --- /dev/null +++ b/_sources/nos.metrics.rst.txt @@ -0,0 +1,37 @@ +nos.metrics package +=================== + +Submodules +---------- + +nos.metrics.error\_metrics module +--------------------------------- + +.. automodule:: nos.metrics.error_metrics + :members: + :undoc-members: + :show-inheritance: + +nos.metrics.metric module +------------------------- + +.. automodule:: nos.metrics.metric + :members: + :undoc-members: + :show-inheritance: + +nos.metrics.operator\_metrics module +------------------------------------ + +.. automodule:: nos.metrics.operator_metrics + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: nos.metrics + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.networks.rst.txt b/_sources/nos.networks.rst.txt new file mode 100644 index 00000000..2c21660a --- /dev/null +++ b/_sources/nos.networks.rst.txt @@ -0,0 +1,21 @@ +nos.networks package +==================== + +Submodules +---------- + +nos.networks.residual module +---------------------------- + +.. automodule:: nos.networks.residual + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: nos.networks + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.operators.rst.txt b/_sources/nos.operators.rst.txt new file mode 100644 index 00000000..32791d02 --- /dev/null +++ b/_sources/nos.operators.rst.txt @@ -0,0 +1,37 @@ +nos.operators package +===================== + +Submodules +---------- + +nos.operators.deep\_dot\_operator module +---------------------------------------- + +.. automodule:: nos.operators.deep_dot_operator + :members: + :undoc-members: + :show-inheritance: + +nos.operators.deep\_root\_operator module +----------------------------------------- + +.. automodule:: nos.operators.deep_root_operator + :members: + :undoc-members: + :show-inheritance: + +nos.operators.mean\_stack\_neural\_operator module +-------------------------------------------------- + +.. automodule:: nos.operators.mean_stack_neural_operator + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: nos.operators + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.rst.txt b/_sources/nos.rst.txt new file mode 100644 index 00000000..54853c85 --- /dev/null +++ b/_sources/nos.rst.txt @@ -0,0 +1,24 @@ +nos package +=========== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + nos.benchmarks + nos.data + nos.metrics + nos.networks + nos.operators + nos.trainers + nos.transforms + +Module contents +--------------- + +.. automodule:: nos + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.trainers.rst.txt b/_sources/nos.trainers.rst.txt new file mode 100644 index 00000000..497c104b --- /dev/null +++ b/_sources/nos.trainers.rst.txt @@ -0,0 +1,29 @@ +nos.trainers package +==================== + +Submodules +---------- + +nos.trainers.average\_metric module +----------------------------------- + +.. automodule:: nos.trainers.average_metric + :members: + :undoc-members: + :show-inheritance: + +nos.trainers.trainer module +--------------------------- + +.. automodule:: nos.trainers.trainer + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: nos.trainers + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/nos.transforms.rst.txt b/_sources/nos.transforms.rst.txt new file mode 100644 index 00000000..a0e3c919 --- /dev/null +++ b/_sources/nos.transforms.rst.txt @@ -0,0 +1,21 @@ +nos.transforms package +====================== + +Submodules +---------- + +nos.transforms.scaling module +----------------------------- + +.. automodule:: nos.transforms.scaling + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: nos.transforms + :members: + :undoc-members: + :show-inheritance: diff --git a/_static/alabaster.css b/_static/alabaster.css new file mode 100644 index 00000000..e3174bf9 --- /dev/null +++ b/_static/alabaster.css @@ -0,0 +1,708 @@ +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: Georgia, serif; + font-size: 17px; + background-color: #fff; + color: #000; + margin: 0; + padding: 0; +} + + +div.document { + width: 940px; + margin: 30px auto 0 auto; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 220px; +} + +div.sphinxsidebar { + width: 220px; + font-size: 14px; + line-height: 1.5; +} + +hr { + border: 1px solid #B1B4B6; +} + +div.body { + background-color: #fff; + color: #3E4349; + padding: 0 30px 0 30px; +} + +div.body > .section { + text-align: left; +} + +div.footer { + width: 940px; + margin: 20px auto 30px auto; + font-size: 14px; + color: #888; + text-align: right; +} + +div.footer a { + color: #888; +} + +p.caption { + font-family: inherit; + font-size: inherit; +} + + +div.relations { + display: none; +} + + +div.sphinxsidebar { + max-height: 100%; + overflow-y: auto; +} + +div.sphinxsidebar a { + color: #444; + text-decoration: none; + border-bottom: 1px dotted #999; +} + +div.sphinxsidebar a:hover { + border-bottom: 1px solid #999; +} + +div.sphinxsidebarwrapper { + padding: 18px 10px; +} + +div.sphinxsidebarwrapper p.logo { + padding: 0; + margin: -10px 0 0 0px; + text-align: center; +} + +div.sphinxsidebarwrapper h1.logo { + margin-top: -10px; + text-align: center; + margin-bottom: 5px; + text-align: left; +} + +div.sphinxsidebarwrapper h1.logo-name { + margin-top: 0px; +} + +div.sphinxsidebarwrapper p.blurb { + margin-top: 0; + font-style: normal; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4 { + font-family: Georgia, serif; + color: #444; + font-size: 24px; + font-weight: normal; + margin: 0 0 5px 0; + padding: 0; +} + +div.sphinxsidebar h4 { + font-size: 20px; +} + +div.sphinxsidebar h3 a { + color: #444; +} + +div.sphinxsidebar p.logo a, +div.sphinxsidebar h3 a, +div.sphinxsidebar p.logo a:hover, +div.sphinxsidebar h3 a:hover { + border: none; +} + +div.sphinxsidebar p { + color: #555; + margin: 10px 0; +} + +div.sphinxsidebar ul { + margin: 10px 0; + padding: 0; + color: #000; +} + +div.sphinxsidebar ul li.toctree-l1 > a { + font-size: 120%; +} + +div.sphinxsidebar ul li.toctree-l2 > a { + font-size: 110%; +} + +div.sphinxsidebar input { + border: 1px solid #CCC; + font-family: Georgia, serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 160px; +} + +div.sphinxsidebar .search > div { + display: table-cell; +} + +div.sphinxsidebar hr { + border: none; + height: 1px; + color: #AAA; + background: #AAA; + + text-align: left; + margin-left: 0; + width: 50%; +} + +div.sphinxsidebar .badge { + border-bottom: none; +} + +div.sphinxsidebar .badge:hover { + border-bottom: none; +} + +/* To address an issue with donation coming after search */ +div.sphinxsidebar h3.donation { + margin-top: 10px; +} + +/* -- body styles ----------------------------------------------------------- */ + +a { + color: #004B6B; + text-decoration: underline; +} + +a:hover { + color: #6D4100; + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: Georgia, serif; + font-weight: normal; + margin: 30px 0px 10px 0px; + padding: 0; +} + +div.body 
h1 { margin-top: 0; padding-top: 0; font-size: 240%; } +div.body h2 { font-size: 180%; } +div.body h3 { font-size: 150%; } +div.body h4 { font-size: 130%; } +div.body h5 { font-size: 100%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #DDD; + padding: 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + color: #444; + background: #EAEAEA; +} + +div.body p, div.body dd, div.body li { + line-height: 1.4em; +} + +div.admonition { + margin: 20px 0px; + padding: 10px 30px; + background-color: #EEE; + border: 1px solid #CCC; +} + +div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fafafa; +} + +div.admonition p.admonition-title { + font-family: Georgia, serif; + font-weight: normal; + font-size: 24px; + margin: 0 0 10px 0; + padding: 0; + line-height: 1; +} + +div.admonition p.last { + margin-bottom: 0; +} + +div.highlight { + background-color: #fff; +} + +dt:target, .highlight { + background: #FAF3E8; +} + +div.warning { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.danger { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.error { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.caution { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.attention { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.important { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.note { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.tip { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.hint { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.seealso { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.topic { + background-color: #EEE; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre, tt, code { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; + font-size: 0.9em; +} + +.hll { + background-color: #FFC; + margin: 0 -12px; + padding: 0 12px; + display: block; +} + +img.screenshot { +} + +tt.descname, tt.descclassname, code.descname, code.descclassname { + font-size: 0.95em; +} + +tt.descname, code.descname { + padding-right: 0.08em; +} + +img.screenshot { + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils { + border: 1px solid #888; + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils td, table.docutils th { + border: 1px solid #888; + padding: 0.25em 0.7em; +} + +table.field-list, table.footnote { + border: none; + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + +table.footnote { + margin: 15px 0; + width: 100%; + border: 1px solid #EEE; + background: #FDFDFD; + font-size: 0.9em; +} + +table.footnote + table.footnote { + margin-top: -15px; + border-top: none; +} + +table.field-list th { + padding: 0 0.8em 0 0; +} + +table.field-list td { + padding: 0; +} + +table.field-list p { + margin-bottom: 0.8em; +} + +/* Cloned from + * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 + */ +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + 
-webkit-hyphens: manual; + hyphens: manual; +} + +table.footnote td.label { + width: .1px; + padding: 0.3em 0 0.3em 0.5em; +} + +table.footnote td { + padding: 0.3em 0.5em; +} + +dl { + margin-left: 0; + margin-right: 0; + margin-top: 0; + padding: 0; +} + +dl dd { + margin-left: 30px; +} + +blockquote { + margin: 0 0 0 30px; + padding: 0; +} + +ul, ol { + /* Matches the 30px from the narrow-screen "li > ul" selector below */ + margin: 10px 0 10px 30px; + padding: 0; +} + +pre { + background: #EEE; + padding: 7px 30px; + margin: 15px 0px; + line-height: 1.3em; +} + +div.viewcode-block:target { + background: #ffd; +} + +dl pre, blockquote pre, li pre { + margin-left: 0; + padding-left: 30px; +} + +tt, code { + background-color: #ecf0f3; + color: #222; + /* padding: 1px 2px; */ +} + +tt.xref, code.xref, a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fff; +} + +a.reference { + text-decoration: none; + border-bottom: 1px dotted #004B6B; +} + +/* Don't put an underline on images */ +a.image-reference, a.image-reference:hover { + border-bottom: none; +} + +a.reference:hover { + border-bottom: 1px solid #6D4100; +} + +a.footnote-reference { + text-decoration: none; + font-size: 0.7em; + vertical-align: top; + border-bottom: 1px dotted #004B6B; +} + +a.footnote-reference:hover { + border-bottom: 1px solid #6D4100; +} + +a:hover tt, a:hover code { + background: #EEE; +} + + +@media screen and (max-width: 870px) { + + div.sphinxsidebar { + display: none; + } + + div.document { + width: 100%; + + } + + div.documentwrapper { + margin-left: 0; + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + } + + div.bodywrapper { + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + margin-left: 0; + } + + ul { + margin-left: 0; + } + + li > ul { + /* Matches the 30px from the "ul, ol" selector above */ + margin-left: 30px; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .bodywrapper { + margin: 0; + } + + .footer { + width: auto; + } + + .github { + display: none; + } + + + +} + + + +@media screen and (max-width: 875px) { + + body { + margin: 0; + padding: 20px 30px; + } + + div.documentwrapper { + float: none; + background: #fff; + } + + div.sphinxsidebar { + display: block; + float: none; + width: 102.5%; + margin: 50px -30px -20px -30px; + padding: 10px 20px; + background: #333; + color: #FFF; + } + + div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, + div.sphinxsidebar h3 a { + color: #fff; + } + + div.sphinxsidebar a { + color: #AAA; + } + + div.sphinxsidebar p.logo { + display: none; + } + + div.document { + width: 100%; + margin: 0; + } + + div.footer { + display: none; + } + + div.bodywrapper { + margin: 0; + } + + div.body { + min-height: 0; + padding: 0; + } + + .rtd_doc_footer { + display: none; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .footer { + width: auto; + } + + .github { + display: none; + } +} + + +/* misc. 
*/ + +.revsys-inline { + display: none!important; +} + +/* Hide ugly table cell borders in ..bibliography:: directive output */ +table.docutils.citation, table.docutils.citation td, table.docutils.citation th { + border: none; + /* Below needed in some edge cases; if not applied, bottom shadows appear */ + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + + +/* relbar */ + +.related { + line-height: 30px; + width: 100%; + font-size: 0.9rem; +} + +.related.top { + border-bottom: 1px solid #EEE; + margin-bottom: 20px; +} + +.related.bottom { + border-top: 1px solid #EEE; +} + +.related ul { + padding: 0; + margin: 0; + list-style: none; +} + +.related li { + display: inline; +} + +nav#rellinks { + float: right; +} + +nav#rellinks li+li:before { + content: "|"; +} + +nav#breadcrumbs li+li:before { + content: "\00BB"; +} + +/* Hide certain items when printing */ +@media print { + div.related { + display: none; + } +} \ No newline at end of file diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 00000000..4157edf2 --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + 
+table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: inherit; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} 
+ +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles 
----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + 
+table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/codecov/coverage_html.js b/_static/codecov/coverage_html.js new file mode 100644 index 00000000..59348828 --- /dev/null +++ b/_static/codecov/coverage_html.js @@ -0,0 +1,624 @@ +// Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 +// For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt + +// Coverage.py HTML report browser code. 
+/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */ +/*global coverage: true, document, window, $ */ + +coverage = {}; + +// General helpers +function debounce(callback, wait) { + let timeoutId = null; + return function(...args) { + clearTimeout(timeoutId); + timeoutId = setTimeout(() => { + callback.apply(this, args); + }, wait); + }; +}; + +function checkVisible(element) { + const rect = element.getBoundingClientRect(); + const viewBottom = Math.max(document.documentElement.clientHeight, window.innerHeight); + const viewTop = 30; + return !(rect.bottom < viewTop || rect.top >= viewBottom); +} + +function on_click(sel, fn) { + const elt = document.querySelector(sel); + if (elt) { + elt.addEventListener("click", fn); + } +} + +// Helpers for table sorting +function getCellValue(row, column = 0) { + const cell = row.cells[column] // nosemgrep: eslint.detect-object-injection + if (cell.childElementCount == 1) { + const child = cell.firstElementChild + if (child instanceof HTMLTimeElement && child.dateTime) { + return child.dateTime + } else if (child instanceof HTMLDataElement && child.value) { + return child.value + } + } + return cell.innerText || cell.textContent; +} + +function rowComparator(rowA, rowB, column = 0) { + let valueA = getCellValue(rowA, column); + let valueB = getCellValue(rowB, column); + if (!isNaN(valueA) && !isNaN(valueB)) { + return valueA - valueB + } + return valueA.localeCompare(valueB, undefined, {numeric: true}); +} + +function sortColumn(th) { + // Get the current sorting direction of the selected header, + // clear state on other headers and then set the new sorting direction + const currentSortOrder = th.getAttribute("aria-sort"); + [...th.parentElement.cells].forEach(header => header.setAttribute("aria-sort", "none")); + if (currentSortOrder === "none") { + th.setAttribute("aria-sort", th.dataset.defaultSortOrder || "ascending"); + } else { + th.setAttribute("aria-sort", currentSortOrder === "ascending" ? "descending" : "ascending"); + } + + const column = [...th.parentElement.cells].indexOf(th) + + // Sort all rows and afterwards append them in order to move them in the DOM + Array.from(th.closest("table").querySelectorAll("tbody tr")) + .sort((rowA, rowB) => rowComparator(rowA, rowB, column) * (th.getAttribute("aria-sort") === "ascending" ? 1 : -1)) + .forEach(tr => tr.parentElement.appendChild(tr) ); +} + +// Find all the elements with data-shortcut attribute, and use them to assign a shortcut key. +coverage.assign_shortkeys = function () { + document.querySelectorAll("[data-shortcut]").forEach(element => { + document.addEventListener("keypress", event => { + if (event.target.tagName.toLowerCase() === "input") { + return; // ignore keypress from search filter + } + if (event.key === element.dataset.shortcut) { + element.click(); + } + }); + }); +}; + +// Create the events for the filter box. +coverage.wire_up_filter = function () { + // Cache elements. + const table = document.querySelector("table.index"); + const table_body_rows = table.querySelectorAll("tbody tr"); + const no_rows = document.getElementById("no_rows"); + + // Observe filter keyevents. 
+ document.getElementById("filter").addEventListener("input", debounce(event => { + // Keep running total of each metric, first index contains number of shown rows + const totals = new Array(table.rows[0].cells.length).fill(0); + // Accumulate the percentage as fraction + totals[totals.length - 1] = { "numer": 0, "denom": 0 }; // nosemgrep: eslint.detect-object-injection + + // Hide / show elements. + table_body_rows.forEach(row => { + if (!row.cells[0].textContent.includes(event.target.value)) { + // hide + row.classList.add("hidden"); + return; + } + + // show + row.classList.remove("hidden"); + totals[0]++; + + for (let column = 1; column < totals.length; column++) { + // Accumulate dynamic totals + cell = row.cells[column] // nosemgrep: eslint.detect-object-injection + if (column === totals.length - 1) { + // Last column contains percentage + const [numer, denom] = cell.dataset.ratio.split(" "); + totals[column]["numer"] += parseInt(numer, 10); // nosemgrep: eslint.detect-object-injection + totals[column]["denom"] += parseInt(denom, 10); // nosemgrep: eslint.detect-object-injection + } else { + totals[column] += parseInt(cell.textContent, 10); // nosemgrep: eslint.detect-object-injection + } + } + }); + + // Show placeholder if no rows will be displayed. + if (!totals[0]) { + // Show placeholder, hide table. + no_rows.style.display = "block"; + table.style.display = "none"; + return; + } + + // Hide placeholder, show table. + no_rows.style.display = null; + table.style.display = null; + + const footer = table.tFoot.rows[0]; + // Calculate new dynamic sum values based on visible rows. + for (let column = 1; column < totals.length; column++) { + // Get footer cell element. + const cell = footer.cells[column]; // nosemgrep: eslint.detect-object-injection + + // Set value into dynamic footer cell element. + if (column === totals.length - 1) { + // Percentage column uses the numerator and denominator, + // and adapts to the number of decimal places. + const match = /\.([0-9]+)/.exec(cell.textContent); + const places = match ? match[1].length : 0; + const { numer, denom } = totals[column]; // nosemgrep: eslint.detect-object-injection + cell.dataset.ratio = `${numer} ${denom}`; + // Check denom to prevent NaN if filtered files contain no statements + cell.textContent = denom + ? `${(numer * 100 / denom).toFixed(places)}%` + : `${(100).toFixed(places)}%`; + } else { + cell.textContent = totals[column]; // nosemgrep: eslint.detect-object-injection + } + } + })); + + // Trigger change event on setup, to force filter on page refresh + // (filter value may still be present). + document.getElementById("filter").dispatchEvent(new Event("input")); +}; + +coverage.INDEX_SORT_STORAGE = "COVERAGE_INDEX_SORT_2"; + +// Loaded on index.html +coverage.index_ready = function () { + coverage.assign_shortkeys(); + coverage.wire_up_filter(); + document.querySelectorAll("[data-sortable] th[aria-sort]").forEach( + th => th.addEventListener("click", e => sortColumn(e.target)) + ); + + // Look for a localStorage item containing previous sort settings: + const stored_list = localStorage.getItem(coverage.INDEX_SORT_STORAGE); + + if (stored_list) { + const {column, direction} = JSON.parse(stored_list); + const th = document.querySelector("[data-sortable]").tHead.rows[0].cells[column]; // nosemgrep: eslint.detect-object-injection + th.setAttribute("aria-sort", direction === "ascending" ? 
"descending" : "ascending"); + th.click() + } + + // Watch for page unload events so we can save the final sort settings: + window.addEventListener("unload", function () { + const th = document.querySelector('[data-sortable] th[aria-sort="ascending"], [data-sortable] [aria-sort="descending"]'); + if (!th) { + return; + } + localStorage.setItem(coverage.INDEX_SORT_STORAGE, JSON.stringify({ + column: [...th.parentElement.cells].indexOf(th), + direction: th.getAttribute("aria-sort"), + })); + }); + + on_click(".button_prev_file", coverage.to_prev_file); + on_click(".button_next_file", coverage.to_next_file); + + on_click(".button_show_hide_help", coverage.show_hide_help); +}; + +// -- pyfile stuff -- + +coverage.LINE_FILTERS_STORAGE = "COVERAGE_LINE_FILTERS"; + +coverage.pyfile_ready = function () { + // If we're directed to a particular line number, highlight the line. + var frag = location.hash; + if (frag.length > 2 && frag[1] === "t") { + document.querySelector(frag).closest(".n").classList.add("highlight"); + coverage.set_sel(parseInt(frag.substr(2), 10)); + } else { + coverage.set_sel(0); + } + + on_click(".button_toggle_run", coverage.toggle_lines); + on_click(".button_toggle_mis", coverage.toggle_lines); + on_click(".button_toggle_exc", coverage.toggle_lines); + on_click(".button_toggle_par", coverage.toggle_lines); + + on_click(".button_next_chunk", coverage.to_next_chunk_nicely); + on_click(".button_prev_chunk", coverage.to_prev_chunk_nicely); + on_click(".button_top_of_page", coverage.to_top); + on_click(".button_first_chunk", coverage.to_first_chunk); + + on_click(".button_prev_file", coverage.to_prev_file); + on_click(".button_next_file", coverage.to_next_file); + on_click(".button_to_index", coverage.to_index); + + on_click(".button_show_hide_help", coverage.show_hide_help); + + coverage.filters = undefined; + try { + coverage.filters = localStorage.getItem(coverage.LINE_FILTERS_STORAGE); + } catch(err) {} + + if (coverage.filters) { + coverage.filters = JSON.parse(coverage.filters); + } + else { + coverage.filters = {run: false, exc: true, mis: true, par: true}; + } + + for (cls in coverage.filters) { + coverage.set_line_visibilty(cls, coverage.filters[cls]); // nosemgrep: eslint.detect-object-injection + } + + coverage.assign_shortkeys(); + coverage.init_scroll_markers(); + coverage.wire_up_sticky_header(); + + document.querySelectorAll("[id^=ctxs]").forEach( + cbox => cbox.addEventListener("click", coverage.expand_contexts) + ); + + // Rebuild scroll markers when the window height changes. + window.addEventListener("resize", coverage.build_scroll_markers); +}; + +coverage.toggle_lines = function (event) { + const btn = event.target.closest("button"); + const category = btn.value + const show = !btn.classList.contains("show_" + category); + coverage.set_line_visibilty(category, show); + coverage.build_scroll_markers(); + coverage.filters[category] = show; + try { + localStorage.setItem(coverage.LINE_FILTERS_STORAGE, JSON.stringify(coverage.filters)); + } catch(err) {} +}; + +coverage.set_line_visibilty = function (category, should_show) { + const cls = "show_" + category; + const btn = document.querySelector(".button_toggle_" + category); + if (btn) { + if (should_show) { + document.querySelectorAll("#source ." + category).forEach(e => e.classList.add(cls)); + btn.classList.add(cls); + } + else { + document.querySelectorAll("#source ." + category).forEach(e => e.classList.remove(cls)); + btn.classList.remove(cls); + } + } +}; + +// Return the nth line div. 
+coverage.line_elt = function (n) { + return document.getElementById("t" + n)?.closest("p"); +}; + +// Set the selection. b and e are line numbers. +coverage.set_sel = function (b, e) { + // The first line selected. + coverage.sel_begin = b; + // The next line not selected. + coverage.sel_end = (e === undefined) ? b+1 : e; +}; + +coverage.to_top = function () { + coverage.set_sel(0, 1); + coverage.scroll_window(0); +}; + +coverage.to_first_chunk = function () { + coverage.set_sel(0, 1); + coverage.to_next_chunk(); +}; + +coverage.to_prev_file = function () { + window.location = document.getElementById("prevFileLink").href; +} + +coverage.to_next_file = function () { + window.location = document.getElementById("nextFileLink").href; +} + +coverage.to_index = function () { + location.href = document.getElementById("indexLink").href; +} + +coverage.show_hide_help = function () { + const helpCheck = document.getElementById("help_panel_state") + helpCheck.checked = !helpCheck.checked; +} + +// Return a string indicating what kind of chunk this line belongs to, +// or null if not a chunk. +coverage.chunk_indicator = function (line_elt) { + const classes = line_elt?.className; + if (!classes) { + return null; + } + const match = classes.match(/\bshow_\w+\b/); + if (!match) { + return null; + } + return match[0]; +}; + +coverage.to_next_chunk = function () { + const c = coverage; + + // Find the start of the next colored chunk. + var probe = c.sel_end; + var chunk_indicator, probe_line; + while (true) { + probe_line = c.line_elt(probe); + if (!probe_line) { + return; + } + chunk_indicator = c.chunk_indicator(probe_line); + if (chunk_indicator) { + break; + } + probe++; + } + + // There's a next chunk, `probe` points to it. + var begin = probe; + + // Find the end of this chunk. + var next_indicator = chunk_indicator; + while (next_indicator === chunk_indicator) { + probe++; + probe_line = c.line_elt(probe); + next_indicator = c.chunk_indicator(probe_line); + } + c.set_sel(begin, probe); + c.show_selection(); +}; + +coverage.to_prev_chunk = function () { + const c = coverage; + + // Find the end of the prev colored chunk. + var probe = c.sel_begin-1; + var probe_line = c.line_elt(probe); + if (!probe_line) { + return; + } + var chunk_indicator = c.chunk_indicator(probe_line); + while (probe > 1 && !chunk_indicator) { + probe--; + probe_line = c.line_elt(probe); + if (!probe_line) { + return; + } + chunk_indicator = c.chunk_indicator(probe_line); + } + + // There's a prev chunk, `probe` points to its last line. + var end = probe+1; + + // Find the beginning of this chunk. + var prev_indicator = chunk_indicator; + while (prev_indicator === chunk_indicator) { + probe--; + if (probe <= 0) { + return; + } + probe_line = c.line_elt(probe); + prev_indicator = c.chunk_indicator(probe_line); + } + c.set_sel(probe+1, end); + c.show_selection(); +}; + +// Returns 0, 1, or 2: how many of the two ends of the selection are on +// the screen right now? +coverage.selection_ends_on_screen = function () { + if (coverage.sel_begin === 0) { + return 0; + } + + const begin = coverage.line_elt(coverage.sel_begin); + const end = coverage.line_elt(coverage.sel_end-1); + + return ( + (checkVisible(begin) ? 1 : 0) + + (checkVisible(end) ? 1 : 0) + ); +}; + +coverage.to_next_chunk_nicely = function () { + if (coverage.selection_ends_on_screen() === 0) { + // The selection is entirely off the screen: + // Set the top line on the screen as selection. 
+ + // This will select the top-left of the viewport + // As this is most likely the span with the line number we take the parent + const line = document.elementFromPoint(0, 0).parentElement; + if (line.parentElement !== document.getElementById("source")) { + // The element is not a source line but the header or similar + coverage.select_line_or_chunk(1); + } else { + // We extract the line number from the id + coverage.select_line_or_chunk(parseInt(line.id.substring(1), 10)); + } + } + coverage.to_next_chunk(); +}; + +coverage.to_prev_chunk_nicely = function () { + if (coverage.selection_ends_on_screen() === 0) { + // The selection is entirely off the screen: + // Set the lowest line on the screen as selection. + + // This will select the bottom-left of the viewport + // As this is most likely the span with the line number we take the parent + const line = document.elementFromPoint(document.documentElement.clientHeight-1, 0).parentElement; + if (line.parentElement !== document.getElementById("source")) { + // The element is not a source line but the header or similar + coverage.select_line_or_chunk(coverage.lines_len); + } else { + // We extract the line number from the id + coverage.select_line_or_chunk(parseInt(line.id.substring(1), 10)); + } + } + coverage.to_prev_chunk(); +}; + +// Select line number lineno, or if it is in a colored chunk, select the +// entire chunk +coverage.select_line_or_chunk = function (lineno) { + var c = coverage; + var probe_line = c.line_elt(lineno); + if (!probe_line) { + return; + } + var the_indicator = c.chunk_indicator(probe_line); + if (the_indicator) { + // The line is in a highlighted chunk. + // Search backward for the first line. + var probe = lineno; + var indicator = the_indicator; + while (probe > 0 && indicator === the_indicator) { + probe--; + probe_line = c.line_elt(probe); + if (!probe_line) { + break; + } + indicator = c.chunk_indicator(probe_line); + } + var begin = probe + 1; + + // Search forward for the last line. + probe = lineno; + indicator = the_indicator; + while (indicator === the_indicator) { + probe++; + probe_line = c.line_elt(probe); + indicator = c.chunk_indicator(probe_line); + } + + coverage.set_sel(begin, probe); + } + else { + coverage.set_sel(lineno); + } +}; + +coverage.show_selection = function () { + // Highlight the lines in the chunk + document.querySelectorAll("#source .highlight").forEach(e => e.classList.remove("highlight")); + for (let probe = coverage.sel_begin; probe < coverage.sel_end; probe++) { + coverage.line_elt(probe).querySelector(".n").classList.add("highlight"); + } + + coverage.scroll_to_selection(); +}; + +coverage.scroll_to_selection = function () { + // Scroll the page if the chunk isn't fully visible. + if (coverage.selection_ends_on_screen() < 2) { + const element = coverage.line_elt(coverage.sel_begin); + coverage.scroll_window(element.offsetTop - 60); + } +}; + +coverage.scroll_window = function (to_pos) { + window.scroll({top: to_pos, behavior: "smooth"}); +}; + +coverage.init_scroll_markers = function () { + // Init some variables + coverage.lines_len = document.querySelectorAll("#source > p").length; + + // Build html + coverage.build_scroll_markers(); +}; + +coverage.build_scroll_markers = function () { + const temp_scroll_marker = document.getElementById("scroll_marker") + if (temp_scroll_marker) temp_scroll_marker.remove(); + // Don't build markers if the window has no scroll bar. 
+ if (document.body.scrollHeight <= window.innerHeight) { + return; + } + + const marker_scale = window.innerHeight / document.body.scrollHeight; + const line_height = Math.min(Math.max(3, window.innerHeight / coverage.lines_len), 10); + + let previous_line = -99, last_mark, last_top; + + const scroll_marker = document.createElement("div"); + scroll_marker.id = "scroll_marker"; + document.getElementById("source").querySelectorAll( + "p.show_run, p.show_mis, p.show_exc, p.show_exc, p.show_par" + ).forEach(element => { + const line_top = Math.floor(element.offsetTop * marker_scale); + const line_number = parseInt(element.querySelector(".n a").id.substr(1)); + + if (line_number === previous_line + 1) { + // If this solid missed block just make previous mark higher. + last_mark.style.height = `${line_top + line_height - last_top}px`; + } else { + // Add colored line in scroll_marker block. + last_mark = document.createElement("div"); + last_mark.id = `m${line_number}`; + last_mark.classList.add("marker"); + last_mark.style.height = `${line_height}px`; + last_mark.style.top = `${line_top}px`; + scroll_marker.append(last_mark); + last_top = line_top; + } + + previous_line = line_number; + }); + + // Append last to prevent layout calculation + document.body.append(scroll_marker); +}; + +coverage.wire_up_sticky_header = function () { + const header = document.querySelector("header"); + const header_bottom = ( + header.querySelector(".content h2").getBoundingClientRect().top - + header.getBoundingClientRect().top + ); + + function updateHeader() { + if (window.scrollY > header_bottom) { + header.classList.add("sticky"); + } else { + header.classList.remove("sticky"); + } + } + + window.addEventListener("scroll", updateHeader); + updateHeader(); +}; + +coverage.expand_contexts = function (e) { + var ctxs = e.target.parentNode.querySelector(".ctxs"); + + if (!ctxs.classList.contains("expanded")) { + var ctxs_text = ctxs.textContent; + var width = Number(ctxs_text[0]); + ctxs.textContent = ""; + for (var i = 1; i < ctxs_text.length; i += width) { + key = ctxs_text.substring(i, i + width).trim(); + ctxs.appendChild(document.createTextNode(contexts[key])); + ctxs.appendChild(document.createElement("br")); + } + ctxs.classList.add("expanded"); + } +}; + +document.addEventListener("DOMContentLoaded", () => { + if (document.body.classList.contains("indexfile")) { + coverage.index_ready(); + } else { + coverage.pyfile_ready(); + } +}); diff --git a/_static/codecov/d_040e78c194e497af_scaling_py.html b/_static/codecov/d_040e78c194e497af_scaling_py.html new file mode 100644 index 00000000..958d2c69 --- /dev/null +++ b/_static/codecov/d_040e78c194e497af_scaling_py.html @@ -0,0 +1,111 @@ + + + + + Coverage for src/nos/transforms/scaling.py: 89% + + + + + +
Coverage for src/nos/transforms/scaling.py: 89% (9 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import torch
from continuity.transforms import (
    Transform,
)


class MinMaxScale(Transform):
    def __init__(self, min_value: torch.Tensor, max_value: torch.Tensor):
        self.min_value = min_value
        self.max_value = max_value
        super().__init__()

    def forward(self, tensor: torch.Tensor) -> torch.Tensor:
        return (tensor - self.min_value) / (self.max_value - self.min_value)
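A minimal usage sketch for the transform listed above, assuming the nos package and torch are importable; forward is called directly so the sketch does not rely on any behaviour of the continuity Transform base class:

    import torch

    from nos.transforms import MinMaxScale

    # Maps min_value to 0 and max_value to 1, elementwise.
    scale = MinMaxScale(min_value=torch.tensor([0.0]), max_value=torch.tensor([4.0]))
    print(scale.forward(torch.tensor([1.0, 2.0, 4.0])))  # tensor([0.2500, 0.5000, 1.0000])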
diff --git a/_static/codecov/d_93093ff38057b02c_deep_dot_operator_py.html b/_static/codecov/d_93093ff38057b02c_deep_dot_operator_py.html
new file mode 100644
index 00000000..f46a01f4
--- /dev/null
+++ b/_static/codecov/d_93093ff38057b02c_deep_dot_operator_py.html
@@ -0,0 +1,155 @@
Coverage for src/nos/operators/deep_dot_operator.py: 100% (28 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import torch
import torch.nn as nn
from continuity.operators import (
    Operator,
)
from continuity.operators.shape import (
    OperatorShapes,
)

from nos.networks import (
    ResNet,
)


class DeepDotOperator(Operator):
    def __init__(
        self,
        shapes: OperatorShapes,
        branch_width: int = 16,
        branch_depth: int = 2,
        trunk_width: int = 16,
        trunk_depth: int = 2,
        dot_width: int = 16,
        dot_depth: int = 2,
        act: nn.Module = nn.Tanh,
        stride: int = 1,
    ):
        super().__init__()
        self.shapes = shapes

        # branch network
        dot_branch_width = dot_width // 2 + dot_depth % 2
        self.branch_lift = nn.Linear(shapes.u.num * shapes.u.dim, branch_width)
        self.branch_hidden = ResNet(width=branch_width, depth=branch_depth, act=act, stride=stride)
        self.branch_project = nn.Linear(branch_width, dot_branch_width)
        self.branch = nn.Sequential(self.branch_lift, self.branch_hidden, self.branch_project)

        # root branch convolution
        self.root_branch_conv = nn.Conv1d(2, 1, 1)

        # trunk network
        dot_trunk_width = dot_width // 2
        self.trunk_lift = nn.Linear(shapes.y.dim, trunk_width)
        self.trunk_hidden = ResNet(width=trunk_width, depth=trunk_depth, act=act, stride=stride)
        self.trunk_project = nn.Linear(trunk_width, dot_trunk_width)
        self.trunk = nn.Sequential(self.trunk_lift, self.trunk_hidden, self.trunk_project)

        # deep dot
        self.deep_dot_hidden = ResNet(width=dot_width, depth=dot_depth, act=act, stride=stride)
        self.deep_dot_project = nn.Linear(dot_width, shapes.v.dim)
        self.deep_dot = nn.Sequential(self.deep_dot_hidden, self.deep_dot_project)

    def forward(self, x: torch.Tensor, u: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
        branch_out = self.branch(u.flatten(-2, -1)).unsqueeze(1).expand(-1, y.size(1), -1)
        trunk_out = self.trunk(y)

        dot_cat = torch.cat([branch_out, trunk_out], dim=-1)
        return self.deep_dot(dot_cat)
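A shape-level sketch of the operator above. The OperatorShapes argument is replaced by a hypothetical stand-in that only exposes the num/dim attributes the constructor reads, and the call form operator(x, u, y) mirrors how the trainer later invokes operators; with the default widths, the branch and trunk projections each contribute dot_width // 2 = 8 features to the concatenated deep-dot input:

    import torch
    from types import SimpleNamespace

    from nos.operators.deep_dot_operator import DeepDotOperator

    # Hypothetical stand-in for continuity's OperatorShapes; only .num/.dim are accessed.
    shapes = SimpleNamespace(
        x=SimpleNamespace(num=1, dim=3),
        u=SimpleNamespace(num=1, dim=3),
        y=SimpleNamespace(num=32, dim=1),
        v=SimpleNamespace(num=32, dim=1),
    )
    operator = DeepDotOperator(shapes)

    x = torch.randn(8, 1, 3)   # (batch, #sensors, x_dim)
    u = torch.randn(8, 1, 3)   # (batch, #sensors, u_dim)
    y = torch.randn(8, 32, 1)  # (batch, #evaluations, y_dim)
    print(operator(x, u, y).shape)  # torch.Size([8, 32, 1])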
diff --git a/_static/codecov/d_93093ff38057b02c_deep_root_operator_py.html b/_static/codecov/d_93093ff38057b02c_deep_root_operator_py.html
new file mode 100644
index 00000000..1abc9710
--- /dev/null
+++ b/_static/codecov/d_93093ff38057b02c_deep_root_operator_py.html
@@ -0,0 +1,167 @@
Coverage for src/nos/operators/deep_root_operator.py: 100% (35 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import torch
import torch.nn as nn
from continuity.operators import (
    Operator,
)
from continuity.operators.shape import (
    OperatorShapes,
)

from nos.networks import (
    ResNet,
)


class DeepRootOperator(Operator):
    def __init__(
        self,
        shapes: OperatorShapes,
        root_width: int = 16,
        root_depth: int = 1,
        branch_width: int = 16,
        branch_depth: int = 2,
        trunk_width: int = 16,
        trunk_depth: int = 2,
        dot_width: int = 16,
        dot_depth: int = 2,
        act: nn.Module = nn.Tanh,
        stride: int = 1,
    ):
        super().__init__()
        self.shapes = shapes

        # root network
        dot_root_branch_width = dot_width // 2 + dot_depth % 2
        self.root_lift = nn.Linear(shapes.x.num * shapes.x.dim, root_width)
        self.root_hidden = ResNet(width=root_width, depth=root_depth, act=act, stride=stride)
        self.root_project = nn.Linear(root_width, dot_root_branch_width)
        self.root = nn.Sequential(self.root_lift, self.root_hidden, self.root_project)

        # branch network
        self.branch_lift = nn.Linear(shapes.u.num * shapes.u.dim, branch_width)
        self.branch_hidden = ResNet(width=branch_width, depth=branch_depth, act=act, stride=stride)
        self.branch_project = nn.Linear(branch_width, dot_width // 2)
        self.branch = nn.Sequential(self.branch_lift, self.branch_hidden, self.branch_project)

        # root branch convolution
        self.root_branch_conv = nn.Conv1d(2, 1, 1)

        # trunk network
        dot_trunk_width = dot_width // 2
        self.trunk_lift = nn.Linear(shapes.y.dim, trunk_width)
        self.trunk_hidden = ResNet(width=trunk_width, depth=trunk_depth, act=act, stride=stride)
        self.trunk_project = nn.Linear(trunk_width, dot_trunk_width)
        self.trunk = nn.Sequential(self.trunk_lift, self.trunk_hidden, self.trunk_project)

        # deep dot
        self.deep_dot_hidden = ResNet(width=dot_width, depth=dot_depth, act=act, stride=stride)
        self.deep_dot_project = nn.Linear(dot_width, shapes.v.dim)
        self.deep_dot = nn.Sequential(self.deep_dot_hidden, self.deep_dot_project)

    def forward(self, x: torch.Tensor, u: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
        root_out = self.root(x.flatten(-2, -1))
        branch_out = self.branch(u.flatten(-2, -1))
        root_branch_cat = torch.cat([root_out.unsqueeze(-2), branch_out.unsqueeze(-2)], dim=-2)
        conv_out = self.root_branch_conv(root_branch_cat).expand(-1, y.size(1), -1)

        trunk_out = self.trunk(y)

        dot_cat = torch.cat([conv_out, trunk_out], dim=-1)
        return self.deep_dot(dot_cat)
diff --git a/_static/codecov/d_93093ff38057b02c_mean_stack_neural_operator_py.html b/_static/codecov/d_93093ff38057b02c_mean_stack_neural_operator_py.html
new file mode 100644
index 00000000..eeee66e1
--- /dev/null
+++ b/_static/codecov/d_93093ff38057b02c_mean_stack_neural_operator_py.html
@@ -0,0 +1,171 @@
Coverage for src/nos/operators/mean_stack_neural_operator.py: 100% (18 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

"""
`continuity.operators.deep_neural_operator`

The Deep Neural Operator architecture.
"""

import torch
from continuity.operators import (
    Operator,
)
from continuity.operators.common import (
    DeepResidualNetwork,
)
from continuity.operators.shape import (
    OperatorShapes,
)


class MeanStackNeuralOperator(Operator):
    """
    The `MeanStackNeuralOperator` class integrates a deep residual network within a neural operator framework. It uses all
    scalar values of the input locations, input functions, and individual evaluation points as inputs for a deep
    residual network.

    Args:
        shapes: An instance of `DatasetShapes`.
        width: The width of the Deep Residual Network, defining the number of neurons in each hidden layer.
        depth: The depth of the Deep Residual Network, indicating the number of hidden layers in the network.

    """

    def __init__(self, shapes: OperatorShapes, width: int = 32, depth: int = 3):
        super().__init__()
        self.shapes = shapes

        self.width = width
        self.depth = depth

        self.net = DeepResidualNetwork(
            input_size=(shapes.y.dim + shapes.u.dim + shapes.x.dim),
            output_size=shapes.v.dim,
            width=width,
            depth=depth,
        )

    def forward(self, x: torch.Tensor, u: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
        """Forward pass through the operator.

        Performs the forward pass through the operator, processing the input function values `u` and input function
        probe locations `x` by flattening them. They are then expanded to match the dimensions of the evaluation
        coordinates y. The preprocessed x, preprocessed u, and y are stacked and passed through a deep residual network.


        Args:
            x: Input coordinates of shape (batch_size, #sensors, x_dim), representing the points in space at
                which the input function values are probed.
            u: Input function values of shape (batch_size, #sensors, u_dim), representing the values of the input
                functions at different sensor locations.
            y: Evaluation coordinates of shape (batch_size, #evaluations, y_dim), representing the points in space at
                which the output function values are to be computed.

        Returns:
            The output of the operator, of shape (batch_size, #evaluations, v_dim), representing the computed function
            values at the specified evaluation coordinates.
        """
        x_repeated = x.unsqueeze(1).expand(-1, y.size(1), -1, -1)
        u_repeated = u.unsqueeze(1).expand(-1, y.size(1), -1, -1)
        y_repeated = y.unsqueeze(2).expand(-1, -1, x.size(1), -1)

        net_input = torch.cat([x_repeated, u_repeated, y_repeated], dim=-1)

        output = self.net(net_input)

        return torch.mean(output, dim=2)
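The shape contract spelled out in the docstring above can be checked with a short sketch, again using a hypothetical stand-in for OperatorShapes that exposes only the attributes the constructor reads:

    import torch
    from types import SimpleNamespace

    from nos.operators.mean_stack_neural_operator import MeanStackNeuralOperator

    shapes = SimpleNamespace(  # hypothetical stand-in for OperatorShapes
        x=SimpleNamespace(num=1, dim=3),
        u=SimpleNamespace(num=1, dim=3),
        y=SimpleNamespace(num=32, dim=1),
        v=SimpleNamespace(num=32, dim=1),
    )
    operator = MeanStackNeuralOperator(shapes, width=32, depth=3)
    out = operator(torch.randn(8, 1, 3), torch.randn(8, 1, 3), torch.randn(8, 32, 1))
    print(out.shape)  # torch.Size([8, 32, 1]), i.e. (batch_size, #evaluations, v_dim)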
diff --git a/_static/codecov/d_99aac0089c5f15d8_error_metrics_py.html b/_static/codecov/d_99aac0089c5f15d8_error_metrics_py.html
new file mode 100644
index 00000000..29e19818
--- /dev/null
+++ b/_static/codecov/d_99aac0089c5f15d8_error_metrics_py.html
@@ -0,0 +1,150 @@
Coverage for src/nos/metrics/error_metrics.py: 76% (21 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

from typing import (
    Dict,
)

import torch
from continuity.data import (
    OperatorDataset,
)
from continuity.operators import (
    Operator,
)

from .metric import (
    Metric,
)


class Loss(Metric):
    """Class for evaluating error metrics.

    Args:
        name: The name of the metric.
        loss: The loss function for calculating the metric.

    """

    def __init__(self, name: str, loss):
        super().__init__(name)
        self.loss = loss

    def __call__(self, operator: Operator, dataset: OperatorDataset) -> Dict:
        operator.eval()
        prediction = operator(dataset.x, dataset.u, dataset.v)
        value = self.loss(prediction, dataset.v).item()
        value /= len(dataset)
        return {
            "Value": value,
            "Unit": "[1]",
        }


class L1Error(Loss):
    """L1 error metric (Mean Absolute Error)."""

    def __init__(self):
        super().__init__("L1_error", torch.nn.L1Loss())


class MSError(Loss):
    """Mean square error metric (L2 Error)."""

    def __init__(self):
        super().__init__("MS_error", torch.nn.MSELoss())
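A self-contained sketch of how the Loss metrics above are called. Both the operator and the dataset below are hypothetical stubs that expose only what Loss.__call__ reads (eval/forward on the operator; x, u, v and __len__ on the dataset):

    import torch

    from nos.metrics import L1Error


    class ZeroOperator(torch.nn.Module):
        """Hypothetical operator stub: predicts zeros of the target shape."""

        def forward(self, x, u, y):
            return torch.zeros_like(y)


    class TinyDataset:
        """Hypothetical dataset stub exposing just the attributes the metric uses."""

        def __init__(self):
            self.x = torch.zeros(4, 1, 3)
            self.u = torch.zeros(4, 1, 3)
            self.v = torch.ones(4, 1, 1)

        def __len__(self):
            return 4


    # Mean absolute error of 1.0, divided by the dataset length of 4.
    print(L1Error()(ZeroOperator(), TinyDataset()))  # {'Value': 0.25, 'Unit': '[1]'}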
diff --git a/_static/codecov/d_99aac0089c5f15d8_metric_py.html b/_static/codecov/d_99aac0089c5f15d8_metric_py.html
new file mode 100644
index 00000000..9e46ed61
--- /dev/null
+++ b/_static/codecov/d_99aac0089c5f15d8_metric_py.html
@@ -0,0 +1,133 @@
Coverage for src/nos/metrics/metric.py: 100% (9 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

from abc import (
    ABC,
    abstractmethod,
)
from typing import (
    Dict,
)

from continuity.data import (
    OperatorDataset,
)
from continuity.operators import (
    Operator,
)


class Metric(ABC):
    """Base class for all metrics."""

    def __init__(self, name: str):
        self.name = name

    @abstractmethod
    def __call__(self, operator: Operator, dataset: OperatorDataset) -> Dict:
        """Evaluates the metric.

        Args:
            operator: operator for which the metric is evaluated.
            dataset: dataset on which the metric is evaluated.

        Returns:
            dict containing the results of the metric (keys "value" and "unit" should be in the dict).
        """

    def __str__(self):
        return self.name
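Concrete metrics implement the abstract __call__ above; a minimal hypothetical subclass, following the "Value"/"Unit" keys used by the built-in metrics in this package, looks like this:

    from typing import Dict

    from nos.metrics import Metric


    class ConstantMetric(Metric):
        """Hypothetical example subclass returning a fixed value."""

        def __init__(self):
            super().__init__("Constant_metric")

        def __call__(self, operator, dataset) -> Dict:
            # A real metric would inspect the operator and the dataset here.
            return {"Value": 1.0, "Unit": "[1]"}


    print(ConstantMetric())  # prints "Constant_metric" via Metric.__str__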
diff --git a/_static/codecov/d_99aac0089c5f15d8_operator_metrics_py.html b/_static/codecov/d_99aac0089c5f15d8_operator_metrics_py.html
new file mode 100644
index 00000000..eb837390
--- /dev/null
+++ b/_static/codecov/d_99aac0089c5f15d8_operator_metrics_py.html
@@ -0,0 +1,139 @@
Coverage for src/nos/metrics/operator_metrics.py: 100% (22 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import time
from typing import (
    Dict,
)

from continuity.data import (
    OperatorDataset,
)
from continuity.operators import (
    Operator,
)

from .metric import (
    Metric,
)


class NumberOfParameters(Metric):
    """Number of parameters in the operator."""

    def __init__(self):
        super().__init__("Number_of_parameters")

    def __call__(self, operator: Operator, dataset: OperatorDataset) -> Dict:
        num_params = sum(p.numel() for p in operator.parameters() if p.requires_grad)
        return {"Value": num_params, "Unit": "[1]"}


class SpeedOfEvaluation(Metric):
    """Speed of a single evaluation in milliseconds."""

    def __init__(self):
        super().__init__("Speed_of_evaluation")

    def __call__(self, operator: Operator, dataset: OperatorDataset) -> Dict:
        operator.eval()
        start_time = time.time_ns()
        _ = operator(dataset.x, dataset.u, dataset.v)
        end_time = time.time_ns()
        delta_time = (end_time - start_time) * 1e-6
        delta_time = delta_time / len(dataset)
        return {"Value": delta_time, "Unit": "[ms]"}
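NumberOfParameters only inspects operator.parameters(), so any torch module can be probed; passing None for the dataset argument is a shortcut of this sketch to satisfy the signature, not something the metric requires:

    import torch.nn as nn

    from nos.metrics import NumberOfParameters

    model = nn.Linear(3, 1)  # 3 weights + 1 bias
    print(NumberOfParameters()(model, dataset=None))  # {'Value': 4, 'Unit': '[1]'}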
diff --git a/_static/codecov/d_a94113ed293c43ea_benchmark_py.html b/_static/codecov/d_a94113ed293c43ea_benchmark_py.html
new file mode 100644
index 00000000..99616b87
--- /dev/null
+++ b/_static/codecov/d_a94113ed293c43ea_benchmark_py.html
@@ -0,0 +1,125 @@
Coverage for src/nos/benchmarks/benchmark.py: 100% (9 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

from dataclasses import (
    dataclass,
    field,
)
from typing import (
    List,
)

from continuity.data import (
    OperatorDataset,
)

from nos.metrics import (
    L1Error,
    Metric,
    MSError,
    NumberOfParameters,
    SpeedOfEvaluation,
)


@dataclass
class Benchmark:
    train_set: OperatorDataset
    test_set: OperatorDataset
    metrics: List[Metric] = field(
        default_factory=lambda: [L1Error(), MSError(), NumberOfParameters(), SpeedOfEvaluation()]
    )
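The default metric list of the dataclass above can be inspected without any real data; the None values below are hypothetical placeholders for the train and test OperatorDatasets:

    from nos.benchmarks.benchmark import Benchmark

    bench = Benchmark(train_set=None, test_set=None)
    print([str(metric) for metric in bench.metrics])
    # ['L1_error', 'MS_error', 'Number_of_parameters', 'Speed_of_evaluation']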
diff --git a/_static/codecov/d_a94113ed293c43ea_transmission_loss_py.html b/_static/codecov/d_a94113ed293c43ea_transmission_loss_py.html
new file mode 100644
index 00000000..cbbe7b9a
--- /dev/null
+++ b/_static/codecov/d_a94113ed293c43ea_transmission_loss_py.html
@@ -0,0 +1,112 @@
Coverage for src/nos/benchmarks/transmission_loss.py: 100% (6 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import pathlib

from nos.data import (
    TLDatasetCompact,
)

from .benchmark import (
    Benchmark,
)

# paths for this specific benchmark
TRAIN_PATH = pathlib.Path.cwd().joinpath("data", "train", "transmission_loss", "gw_6e-1", "dset.csv")
TEST_PATH = pathlib.Path.cwd().joinpath("data", "test", "transmission_loss", "gw_6e-1", "dset_test.csv")

transmission_loss_const_gap = Benchmark(train_set=TLDatasetCompact(TRAIN_PATH), test_set=TLDatasetCompact(TEST_PATH))
diff --git a/_static/codecov/d_db4399fe0ac4c92a_transmssion_loss_py.html b/_static/codecov/d_db4399fe0ac4c92a_transmssion_loss_py.html
new file mode 100644
index 00000000..8da26071
--- /dev/null
+++ b/_static/codecov/d_db4399fe0ac4c92a_transmssion_loss_py.html
@@ -0,0 +1,272 @@
Coverage for src/nos/data/transmssion_loss.py: 53% (95 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import pathlib

import numpy as np
import pandas as pd
import torch
from continuity.data import (
    OperatorDataset,
)
from continuity.transforms import (
    Normalize,
)

from nos.transforms import (
    MinMaxScale,
)


class TLDataset(OperatorDataset):
    def __init__(self, csv_file: pathlib.Path):
        # retrieve data
        df = pd.read_csv(csv_file, dtype=np.float32)

        x = torch.stack(
            [torch.tensor(df["radius"]), torch.tensor(df["inner_radius"]), torch.tensor(df["gap_width"])], dim=1
        ).reshape(-1, 1, 3)
        u = x
        y = torch.tensor(df["frequency"]).reshape(-1, 1, 1)
        v = torch.tensor(df["transmission_loss"]).unsqueeze(1).reshape(-1, 1, 1)

        # find appropriate transformations
        means = df.mean().to_dict()
        stds = df.std().to_dict()
        min_vals = df.min().to_dict()
        max_vals = df.max().to_dict()

        x_transforms = MinMaxScale(
            torch.tensor([min_vals["radius"], min_vals["inner_radius"], min_vals["gap_width"]]).reshape(1, 3),
            torch.tensor([max_vals["radius"], max_vals["inner_radius"], max_vals["gap_width"]]).reshape(1, 3),
        )
        u_transforms = x_transforms
        y_transforms = Normalize(
            torch.tensor(means["frequency"]).reshape(1, 1), torch.tensor(stds["frequency"]).reshape(1, 1)
        )
        v_transforms = Normalize(
            torch.tensor(means["transmission_loss"]).reshape(1, 1),
            torch.tensor(stds["transmission_loss"]).reshape(1, 1),
        )

        super().__init__(x, u, y, v, x_transforms, u_transforms, y_transforms, v_transforms)


class TLDatasetCompact(OperatorDataset):
    """Transmission loss dataset, with bigger evaluation space."""

    def __init__(self, path: pathlib.Path, n_samples: int = -1):
        if path.is_file():
            df = pd.read_csv(path, dtype=np.float32)
        else:
            df = pd.DataFrame()
            for file in path.rglob("*.csv"):
                df_tmp = pd.read_csv(file, dtype=np.float32)
                df = pd.concat([df, df_tmp])

        unique_crystals = df[["radius", "inner_radius", "gap_width"]].drop_duplicates()

        num_evals = len(df) // len(unique_crystals)

        x = torch.empty((len(unique_crystals), 1, 3))
        u = x
        y = torch.empty((len(unique_crystals), num_evals, 1))
        v = torch.empty((len(unique_crystals), num_evals, 1))

        for i, (_, crystal) in enumerate(unique_crystals.iterrows()):
            c_df = df.loc[
                (df["radius"] == crystal["radius"])
                * (df["inner_radius"] == crystal["inner_radius"])
                * (df["gap_width"] == crystal["gap_width"])
            ]

            x[i] = torch.tensor([crystal["radius"], crystal["inner_radius"], crystal["gap_width"]]).reshape(1, 3)
            y[i] = torch.tensor([c_df["frequency"].to_list()]).reshape(num_evals, 1)
            v[i] = torch.tensor([[c_df["transmission_loss"].to_list()]]).reshape(num_evals, 1)

        if n_samples != -1:
            perm = torch.randperm(x.size(0))
            idx = perm[:n_samples]
            x = x[idx]
            u = u[idx]
            y = y[idx]
            v = v[idx]

        # find appropriate transformations
        means = df.mean().to_dict()
        stds = df.std().to_dict()
        min_vals = df.min().to_dict()
        max_vals = df.max().to_dict()

        x_transforms = MinMaxScale(
            torch.tensor([min_vals["radius"], min_vals["inner_radius"], min_vals["gap_width"]]).reshape(1, 3),
            torch.tensor([max_vals["radius"], max_vals["inner_radius"], max_vals["gap_width"]]).reshape(1, 3),
        )
        u_transforms = x_transforms
        y_transforms = Normalize(
            torch.tensor(means["frequency"]).reshape(1, 1), torch.tensor(stds["frequency"]).reshape(1, 1)
        )
        v_transforms = Normalize(
            torch.tensor(means["transmission_loss"]).reshape(1, 1),
            torch.tensor(stds["transmission_loss"]).reshape(1, 1),
        )

        super().__init__(x, u, y, v, x_transforms, u_transforms, y_transforms, v_transforms)


class TLDatasetCompactExp(OperatorDataset):
    """Transmission loss dataset, with bigger evaluation space."""

    def __init__(self, path: pathlib.Path, n_samples: int = -1):
        if path.is_file():
            df = pd.read_csv(path, dtype=np.float32)
        else:
            df = pd.DataFrame()
            for file in path.rglob("*.csv"):
                df_tmp = pd.read_csv(file, dtype=np.float32)
                df = pd.concat([df, df_tmp])
        df["transmission_loss"] = df["transmission_loss"].clip(upper=0.0)
        df["transmission_loss"] = 10 ** df["transmission_loss"]

        unique_crystals = df[["radius", "inner_radius", "gap_width"]].drop_duplicates()

        num_evals = len(df) // len(unique_crystals)

        x = torch.empty((len(unique_crystals), 1, 3))
        u = x
        y = torch.empty((len(unique_crystals), num_evals, 1))
        v = torch.empty((len(unique_crystals), num_evals, 1))

        for i, (_, crystal) in enumerate(unique_crystals.iterrows()):
            c_df = df.loc[
                (df["radius"] == crystal["radius"])
                * (df["inner_radius"] == crystal["inner_radius"])
                * (df["gap_width"] == crystal["gap_width"])
            ]

            x[i] = torch.tensor([crystal["radius"], crystal["inner_radius"], crystal["gap_width"]]).reshape(1, 3)
            y[i] = torch.tensor([c_df["frequency"].to_list()]).reshape(num_evals, 1)
            v[i] = torch.tensor([[c_df["transmission_loss"].to_list()]]).reshape(num_evals, 1)

        if n_samples != -1:
            perm = torch.randperm(x.size(0))
            idx = perm[:n_samples]
            x = x[idx]
            u = u[idx]
            y = y[idx]
            v = v[idx]

        # find appropriate transformations
        means = df.mean().to_dict()
        stds = df.std().to_dict()
        min_vals = df.min().to_dict()
        max_vals = df.max().to_dict()

        x_transforms = MinMaxScale(
            torch.tensor([min_vals["radius"], min_vals["inner_radius"], min_vals["gap_width"]]).reshape(1, 3),
            torch.tensor([max_vals["radius"], max_vals["inner_radius"], max_vals["gap_width"]]).reshape(1, 3),
        )
        u_transforms = x_transforms
        y_transforms = Normalize(
            torch.tensor(means["frequency"]).reshape(1, 1), torch.tensor(stds["frequency"]).reshape(1, 1)
        )
        v_transforms = Normalize(
            torch.tensor(means["transmission_loss"]).reshape(1, 1),
            torch.tensor(stds["transmission_loss"]).reshape(1, 1),
        )

        super().__init__(x, u, y, v, x_transforms, u_transforms, y_transforms, v_transforms)
diff --git a/_static/codecov/d_dfcfe80011fec8d7_residual_py.html b/_static/codecov/d_dfcfe80011fec8d7_residual_py.html
new file mode 100644
index 00000000..15cea62e
--- /dev/null
+++ b/_static/codecov/d_dfcfe80011fec8d7_residual_py.html
@@ -0,0 +1,144 @@
Coverage for src/nos/networks/residual.py: 96% (27 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

from typing import (
    List,
)

import torch
import torch.nn as nn


class ResBlock(nn.Module):
    def __init__(self, width: int, depth: int, act: nn.Module):
        super().__init__()

        self.net = nn.Sequential()
        for i in range(depth):
            self.net.add_module(f"linear_{i}", torch.nn.Linear(width, width))
            self.net.add_module(f"Act_{i}", act())

    def forward(self, x: torch.Tensor):
        out = self.net(x)
        return out + x


class ResNet(nn.Module):
    def __init__(
        self,
        width: int,
        depth: int,
        act: nn.Module,
        stride: int = 1,
        transition_transformations: List[nn.Module] = None,
    ):
        super().__init__()

        if transition_transformations is None:
            transition_transformations = []

        assert depth % stride == 0
        n_blocks = depth // stride

        self.net = nn.Sequential()
        for i in range(n_blocks):
            self.net.add_module(f"ResBlock_{i}", ResBlock(width=width, depth=depth, act=act))
            for j, transformation in enumerate(transition_transformations):
                self.net.add_module(f"Transformation_{i}_{j}", transformation())

    def forward(self, x: torch.Tensor):
        return self.net(x)
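A minimal sketch of the residual network above: with width=16, depth=2 and stride=1 it stacks two ResBlocks, each containing two Linear(16, 16) layers followed by Tanh, and it preserves the feature dimension:

    import torch
    import torch.nn as nn

    from nos.networks import ResNet

    net = ResNet(width=16, depth=2, act=nn.Tanh)
    print(net(torch.randn(4, 16)).shape)  # torch.Size([4, 16])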
diff --git a/_static/codecov/d_e3bc015ca2131802_average_metric_py.html b/_static/codecov/d_e3bc015ca2131802_average_metric_py.html
new file mode 100644
index 00000000..fe72f148
--- /dev/null
+++ b/_static/codecov/d_e3bc015ca2131802_average_metric_py.html
@@ -0,0 +1,124 @@
Coverage for src/nos/trainers/average_metric.py: 0% (21 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

class AverageMetric:
    def __init__(self, name: str, fmt: str = ":f"):
        self.name = name
        self.fmt = fmt
        self.val = 0.0
        self.n_items = 0
        self.avg = 0.0

    def __call__(self):
        return self.avg

    def reset(self):
        self.val = 0.0
        self.n_items = 0
        self.avg = 0.0

    def update(self, val: float, n: int = 1):
        self.n_items += n
        self.val += val

        self.avg = self.val / self.n_items

    def __str__(self):
        return f"{self.name}: {self.avg: {self.fmt}}"

    def to_dict(self):
        return {"name": self.name, "val": self.avg}
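A short sketch of the running average above, used the same way the trainer uses it per epoch:

    from nos.trainers.average_metric import AverageMetric

    loss_meter = AverageMetric("Train-loss", ":6.3f")
    loss_meter.update(0.5)
    loss_meter.update(0.3)
    print(loss_meter())          # 0.4, the running average over two updates
    print(loss_meter.to_dict())  # {'name': 'Train-loss', 'val': 0.4}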
diff --git a/_static/codecov/d_e3bc015ca2131802_trainer_py.html b/_static/codecov/d_e3bc015ca2131802_trainer_py.html
new file mode 100644
index 00000000..4ef1f82a
--- /dev/null
+++ b/_static/codecov/d_e3bc015ca2131802_trainer_py.html
@@ -0,0 +1,234 @@
Coverage for src/nos/trainers/trainer.py: 0% (84 statements), coverage.py v7.4.3, created at 2024-03-14 18:23 +0000

import time

import mlflow
import torch.optim.lr_scheduler as sched
import torch.utils.data
from continuity.data import (
    OperatorDataset,
)
from continuity.operators import (
    Operator,
)
from loguru import (
    logger,
)
from torch.utils.data import (
    DataLoader,
)
from tqdm import (
    tqdm,
)

from .average_metric import (
    AverageMetric,
)


class Trainer:
    def __init__(self, criterion, optimizer):
        self.test_val_split = 0.9
        self.criterion = criterion
        self.optimizer = optimizer
        self.scheduler = sched.ConstantLR(optimizer, 1.0)

    def __call__(
        self, operator: Operator, data_set: OperatorDataset, max_epochs: int = 100, batch_size: int = 2**10
    ) -> Operator:
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # train_set, val_set = random_split(data_set, [self.test_val_split, 1 - self.test_val_split])
        train_set = val_set = data_set

        logger.info(f"Starting training for {max_epochs} epochs on {device}.")

        train_loader = DataLoader(train_set, batch_size=batch_size)
        val_loader = DataLoader(val_set, batch_size=batch_size)

        operator.to(device)

        with mlflow.start_run():
            pbar = tqdm(range(max_epochs))
            train_loss = torch.inf
            val_loss = torch.inf
            for epoch in pbar:
                pbar.set_description(
                    f"Train Loss: {train_loss: .6f},\t Val Loss: {val_loss: .6f}, Lr: {self.optimizer.param_groups[0]['lr']}"
                )
                train_loss = self.train(train_loader, operator, epoch, device)
                val_loss = self.eval(val_loader, operator, epoch, device)
                self.scheduler.step()
                self.log("lr", self.optimizer.param_groups[0]["lr"], epoch)

            logger.info("Training finished.")

        return operator

    def train(self, loader, model, epoch, device):
        batch_time = AverageMetric("Train-time", ":6.3f")
        data_time = AverageMetric("Train-data-load", ":6.3f")
        data_transfer = AverageMetric("Train-data-transfer", "6.3f")
        avg_loss = AverageMetric("Train-loss", ":6.3f")

        # switch to train mode
        model.train()
        end = time.time()

        for x, u, y, v in loader:
            start = time.time()
            data_time.update(start - end)  # measure data loading time
            x, u, y, v = x.to(device), u.to(device), y.to(device), v.to(device)
            data_transfer.update(time.time() - start)

            # compute output
            output = model(x, u, y)
            loss = self.criterion(output, v)

            # compute gradient
            self.optimizer.zero_grad()
            loss.backward()
            self.optimizer.step()

            # update metrics
            avg_loss.update(loss.item())

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

        self.log(**batch_time.to_dict(), epoch=epoch)
        self.log(**data_time.to_dict(), epoch=epoch)
        self.log(**data_transfer.to_dict(), epoch=epoch)
        self.log(**avg_loss.to_dict(), epoch=epoch)
        return avg_loss.avg

    def eval(self, loader, model, epoch, device):
        batch_time = AverageMetric("Eval-time", ":6.3f")
        data_time = AverageMetric("Eval-data-load", ":6.3f")
        data_transfer = AverageMetric("Eval-data-transfer", "6.3f")
        avg_loss = AverageMetric("Eval-loss", ":6.3f")

        # switch to train mode
        model.eval()
        end = time.time()

        for x, u, y, v in loader:
            start = time.time()
            data_time.update(start - end)  # measure data loading time
            x, u, y, v = x.to(device), u.to(device), y.to(device), v.to(device)
            data_transfer.update(time.time() - start)

            # compute output
            output = model(x, u, y)
            loss = self.criterion(output, v)

            # update metrics
            avg_loss.update(loss.item())

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

        self.log(**batch_time.to_dict(), epoch=epoch)
        self.log(**data_time.to_dict(), epoch=epoch)
        self.log(**data_transfer.to_dict(), epoch=epoch)
        self.log(**avg_loss.to_dict(), epoch=epoch)
        return avg_loss.avg

    def log(self, name: str, val: float, epoch: int):
        mlflow.log_metric(key=name, value=val, step=epoch)
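A hedged end-to-end sketch of how this trainer is meant to be driven. The CSV path is hypothetical, and the sketch assumes that the dataset object exposes the OperatorShapes the operator constructor expects and that MLflow can log locally (mlflow.start_run defaults to a ./mlruns directory):

    import pathlib

    import torch

    from nos.data import TLDatasetCompact
    from nos.operators.deep_dot_operator import DeepDotOperator
    from nos.trainers.trainer import Trainer

    # Hypothetical dataset location; see the benchmark paths above for the expected layout.
    dataset = TLDatasetCompact(pathlib.Path("data/train/transmission_loss/gw_6e-1/dset.csv"))

    operator = DeepDotOperator(dataset.shapes)  # assumes the OperatorDataset exposes .shapes
    trainer = Trainer(
        criterion=torch.nn.MSELoss(),
        optimizer=torch.optim.Adam(operator.parameters(), lr=1e-3),
    )
    operator = trainer(operator, dataset, max_epochs=10, batch_size=256)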
+ + + diff --git a/_static/codecov/favicon_32.png b/_static/codecov/favicon_32.png new file mode 100644 index 0000000000000000000000000000000000000000..8649f0475d8d20793b2ec431fe25a186a414cf10 GIT binary patch literal 1732 zcmV;#20QtQP)K2KOkBOVxIZChq#W-v7@TU%U6P(wycKT1hUJUToW3ke1U1ONa4 z000000000000000bb)GRa9mqwR9|UWHy;^RUrt?IT__Y0JUcxmBP0(51q1>E00030 z|NrOz)aw7%8sJzM<5^g%z7^qE`}_Ot|JUUG(NUkWzR|7K?Zo%@_v-8G-1N%N=D$;; zw;keH4dGY$`1t4M=HK_s*zm^0#KgqfwWhe3qO_HtvXYvtjgX>;-~C$L`&k>^R)9)7 zdPh2TL^pCnHC#0+_4D)M`p?qp!pq{jO_{8;$fbaflbx`Tn52n|n}8VFRTA1&ugOP< zPd{uvFjz7t*Vot1&d$l-xWCk}s;sQL&#O(Bskh6gqNJv>#iB=ypG1e3K!K4yc7!~M zfj4S*g^zZ7eP$+_Sl07Z646l;%urinP#D8a6TwRtnLIRcI!r4f@bK~9-`~;E(N?Lv zSEst7s;rcxsi~}{Nsytfz@MtUoR*iFc8!#vvx}Umhm4blk(_~MdVD-@dW&>!Nn~ro z_E~-ESVQAj6Wmn;(olz(O&_{U2*pZBc1aYjMh>Dq3z|6`jW`RDHV=t3I6yRKJ~LOX zz_z!!vbVXPqob#=pj3^VMT?x6t(irRmSKsMo1~LLkB&=#j!=M%NP35mfqim$drWb9 zYIb>no_LUwc!r^NkDzs4YHu@=ZHRzrafWDZd1EhEVq=tGX?tK$pIa)DTh#bkvh!J- z?^%@YS!U*0E8$q$_*aOTQ&)Ra64g>ep;BdcQgvlg8qQHrP*E$;P{-m=A*@axn@$bO zO-Y4JzS&EAi%YG}N?cn?YFS7ivPY=EMV6~YH;+Xxu|tefLS|Aza)Cg6us#)=JW!uH zQa?H>d^j+YHCtyjL^LulF*05|F$RG!AX_OHVI&MtA~_@=5_lU|0000rbW%=J06GH4 z^5LD8b8apw8vNh1ua1mF{{Hy)_U`NA;Nacc+sCpuHXa-V{r&yz?c(9#+}oX+NmiRW z+W-IqK1oDDR5;6GfCDCOP5}iL5fK(cB~ET81`MFgF2kGa9AjhSIk~-E-4&*tPPKdiilQJ11k_J082ZS z>@TvivP!5ZFG?t@{t+GpR3XR&@*hA_VE1|Lo8@L@)l*h(Z@=?c-NS$Fk&&61IzUU9 z*nPqBM=OBZ-6ka1SJgGAS-Us5EN)r#dUX%>wQZLa2ytPCtMKp)Ob z*xcu38Z&d5<-NBS)@jRD+*!W*cf-m_wmxDEqBf?czI%3U0J$Xik;lA`jg}VH?(S(V zE!M3;X2B8w0TnnW&6(8;_Uc)WD;Ms6PKP+s(sFgO!}B!^ES~GDt4qLPxwYB)^7)XA zZwo9zDy-B0B+jT6V=!=bo(zs_8{eBA78gT9GH$(DVhz;4VAYwz+bOIdZ-PNb|I&rl z^XG=vFLF)1{&nT2*0vMz#}7^9hXzzf&ZdKlEj{LihP;|;Ywqn35ajP?H?7t|i-Un% z&&kxee@9B{nwgv1+S-~0)E1{ob1^Wn`F2isurqThKK=3%&;`@{0{!D- z&CSj80t;uPu&FaJFtSXKH#ajgGj}=sEad7US6jP0|Db@0j)?(5@sf<7`~a9>s;wCa zm^)spe{uxGFmrJYI9cOh7s$>8Npkt-5EWB1UKc`{W{y5Ce$1+nM9Cr;);=Ju#N^62OSlJMn7omiUgP&ErsYzT~iGxcW aE(`!K@+CXylaC4j0000 + + + + Coverage report + + + + + +
Coverage report: 59% total (coverage.py v7.4.3, created at 2024-03-14 18:23 +0000)

Module                                            statements  missing  excluded  coverage
src/nos/benchmarks/benchmark.py                            9        0         0      100%
src/nos/benchmarks/transmission_loss.py                    6        0         0      100%
src/nos/data/transmssion_loss.py                          95       45         0       53%
src/nos/metrics/error_metrics.py                          21        5         0       76%
src/nos/metrics/metric.py                                  9        0         3      100%
src/nos/metrics/operator_metrics.py                       22        0         0      100%
src/nos/networks/residual.py                              27        1         0       96%
src/nos/operators/deep_dot_operator.py                    28        0         0      100%
src/nos/operators/deep_root_operator.py                   35        0         0      100%
src/nos/operators/mean_stack_neural_operator.py           18        0         0      100%
src/nos/trainers/average_metric.py                        21       21         0        0%
src/nos/trainers/trainer.py                               84       84         0        0%
src/nos/transforms/scaling.py                              9        1         0       89%
Total                                                    384      157         3       59%
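The 59% total follows directly from the statement counts in the table above; as a quick check:

    statements, missing = 384, 157
    print(round(100 * (statements - missing) / statements))  # 59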
+ + + diff --git a/_static/codecov/keybd_closed.png b/_static/codecov/keybd_closed.png new file mode 100644 index 0000000000000000000000000000000000000000..ba119c47df81ed2bbd27a06988abf700139c4f99 GIT binary patch literal 9004 zcmeHLc{tSF+aIY=A^R4_poB4tZAN2XC;O7M(inrW3}(h&Q4}dl*&-65$i9^&vW6_# zcM4g`Qix=GhkBl;=lwnJ@Ap2}^}hc-b6vBXb3XUyzR%~}_c`-Dw+!?&>5p(90RRB> zXe~7($~PP3eT?=X<@3~Q1w84vX~IoSx~1#~02+TopXK(db;4v6!{+W`RHLkkHO zo;+s?)puc`+$yOwHv>I$5^8v^F3<|$44HA8AFnFB0cAP|C`p}aSMJK*-CUB{eQ!;K z-9Ju3OQ+xVPr3P#o4>_lNBT;M+1vgV&B~6!naOGHb-LFA9TkfHv1IFA1Y!Iz!Zl3) z%c#-^zNWPq7U_}6I7aHSmFWi125RZrNBKyvnV^?64)zviS;E!UD%LaGRl6@zn!3E{ zJ`B$5``cH_3a)t1#6I7d==JeB_IcSU%=I#DrRCBGm8GvCmA=+XHEvC2SIfsNa0(h9 z7P^C4U`W@@`9p>2f^zyb5B=lpc*RZMn-%%IqrxSWQF8{ec3i?-AB(_IVe z)XgT>Y^u41MwOMFvU=I4?!^#jaS-%bjnx@ zmL44yVEslR_ynm18F!u}Ru#moEn3EE?1=9@$B1Z5aLi5b8{&?V(IAYBzIar!SiY3< z`l0V)djHtrImy}(!7x-Pmq+njM)JFQ9mx*(C+9a3M)(_SW|lrN=gfxFhStu^zvynS zm@gl;>d8i8wpUkX42vS3BEzE3-yctH%t0#N%s+6-&_<*Fe7+h=`=FM?DOg1)eGL~~ zQvIFm$D*lqEh07XrXY=jb%hdyP4)`wyMCb$=-z9(lOme9=tirVkb)_GOl2MJn;=Ky z^0pV1owR7KP-BSxhI@@@+gG0roD-kXE1;!#R7KY1QiUbyDdTElm|ul7{mMdF1%UDJ z_vp=Vo!TCF?D*?u% zk~}4!xK2MSQd-QKC0${G=ZRv2x8%8ZqdfR!?Dv=5Mj^8WU)?iH;C?o6rSQy*^YwQb zf@5V)q=xah#a3UEIBC~N7on(p4jQd4K$|i7k`d8mw|M{Mxapl46Z^X^9U}JgqH#;T z`CTzafpMD+J-LjzF+3Xau>xM_sXisRj6m-287~i9g|%gHc}v77>n_+p7ZgmJszx!b zSmL4wV;&*5Z|zaCk`rOYFdOjZLLQr!WSV6AlaqYh_OE)>rYdtx`gk$yAMO=-E1b~J zIZY6gM*}1UWsJ)TW(pf1=h?lJy_0TFOr|nALGW>$IE1E7z+$`^2WJY+>$$nJo8Rs` z)xS>AH{N~X3+b=2+8Q_|n(1JoGv55r>TuwBV~MXE&9?3Zw>cIxnOPNs#gh~C4Zo=k z&!s;5)^6UG>!`?hh0Q|r|Qbm>}pgtOt23Vh!NSibozH$`#LSiYL)HR4bkfEJMa zBHwC3TaHx|BzD|MXAr>mm&FbZXeEX-=W}Ji&!pji4sO$#0Wk^Q7j%{8#bJPn$C=E% zPlB}0)@Ti^r_HMJrTMN?9~4LQbIiUiOKBVNm_QjABKY4;zC88yVjvB>ZETNzr%^(~ zI3U&Ont?P`r&4 z#Bp)jcVV_N_{c1_qW}_`dQm)D`NG?h{+S!YOaUgWna4i8SuoLcXAZ|#Jh&GNn7B}3 z?vZ8I{LpmCYT=@6)dLPd@|(;d<08ufov%+V?$mgUYQHYTrc%eA=CDUzK}v|G&9}yJ z)|g*=+RH1IQ>rvkY9UIam=fkxWDyGIKQ2RU{GqOQjD8nG#sl+$V=?wpzJdT=wlNWr z1%lw&+;kVs(z?e=YRWRA&jc75rQ~({*TS<( z8X!j>B}?Bxrrp%wEE7yBefQ?*nM20~+ZoQK(NO_wA`RNhsqVkXHy|sod@mqen=B#@ zmLi=x2*o9rWqTMWoB&qdZph$~qkJJTVNc*8^hU?gH_fY{GYPEBE8Q{j0Y$tvjMv%3 z)j#EyBf^7n)2d8IXDYX2O0S%ZTnGhg4Ss#sEIATKpE_E4TU=GimrD5F6K(%*+T-!o z?Se7^Vm`$ZKDwq+=~jf?w0qC$Kr&R-;IF#{iLF*8zKu8(=#chRO;>x zdM;h{i{RLpJgS!B-ueTFs8&4U4+D8|7nP~UZ@P`J;*0sj^#f_WqT#xpA?@qHonGB& zQ<^;OLtOG1w#)N~&@b0caUL7syAsAxV#R`n>-+eVL9aZwnlklzE>-6!1#!tVA`uNo z>Gv^P)sohc~g_1YMC;^f(N<{2y5C^;QCEXo;LQ^#$0 zr>jCrdoeXuff!dJ^`#=Wy2Gumo^Qt7BZrI~G+Pyl_kL>is3P0^JlE;Sjm-YfF~I>t z_KeNpK|5U&F4;v?WS&#l(jxUWDarfcIcl=-6!8>^S`57!M6;hZea5IFA@)2+*Rt85 zi-MBs_b^DU8LygXXQGkG+86N7<%M|baM(orG*ASffC`p!?@m{qd}IcYmZyi^d}#Q& zNjk-0@CajpUI-gPm20ERVDO!L8@p`tMJ69FD(ASIkdoLdiRV6h9TPKRz>2WK4upHd z6OZK33EP?`GoJkXh)S035}uLUO$;TlXwNdMg-WOhLB)7a`-%*a9lFmjf6n+4ZmIHN z-V@$ z8PXsoR4*`5RwXz=A8|5;aXKtSHFccj%dG7cO~UBJnt)61K>-uPX)`vu{7fcX6_>zZ zw_2V&Li+7mxbf!f7{Rk&VVyY!UtZywac%g!cH+xh#j$a`uf?XWl<``t`36W;p7=_* zO6uf~2{sAdkZn=Ts@p0>8N8rzw2ZLS@$ibV-c-QmG@%|3gUUrRxu=e*ekhTa+f?8q z3$JVGPr9w$VQG~QCq~Y=2ThLIH!T@(>{NihJ6nj*HA_C#Popv)CBa)+UI-bx8u8zfCT^*1|k z&N9oFYsZEijPn31Yx_yO5pFs>0tOAV=oRx~Wpy5ie&S_449m4R^{LWQMA~}vocV1O zIf#1ZV85E>tvZE4mz~zn{hs!pkIQM;EvZMimqiPAJu-9P@mId&nb$lsrICS=)zU3~ zn>a#9>}5*3N)9;PTMZ)$`5k} z?iG}Rwj$>Y*|(D3S3e&fxhaPHma8@vwu(cwdlaCjX+NIK6=$H4U`rfzcWQVOhp{fnzuZhgCCGpw|p zTi`>cv~xVzdx|^`C0vXdlMwPae3S?>3|7v$e*Bs6-5gS>>FMHk_r2M(ADOV{KV7+6 zA@5Q(mdx%7J}MY}K461iuQ}5GwDGI=Yc&g0MZHu)7gC3{5@QZj6SJl*o0MS2Cl_ia zyK?9QmC9tJ6yn{EA-erJ4wk$+!E#X(s~9h^HOmQ_|6V_s1)k;%9Q6Niw}SyT?jxl4 z;HYz2$Nj$8Q_*Xo`TWEUx^Q9b+ik@$o39`mlY&P}G8wnjdE+Dlj?uL;$aB$n;x 
zWoh-M_u>9}_Ok@d_uidMqz10zJc}RQijPW3Fs&~1am=j*+A$QWTvxf9)6n;n8zTQW z!Q_J1%apTsJzLF`#^P_#mRv2Ya_keUE7iMSP!ha-WQoo0vZZG?gyR;+4q8F6tL#u< zRj8Hu5f-p1$J;)4?WpGL{4@HmJ6&tF9A5Tc8Trp>;Y>{^s?Q1&bam}?OjsnKd?|Z82aix26wUOLxbEW~E)|CgJ#)MLf_me# zv4?F$o@A~Um)6>HlM0=3Bd-vc91EM}D+t6-@!}O%i*&Wl%@#C8X+?5+nv`oPu!!=5 znbL+Fk_#J_%8vOq^FIv~5N(nk03kyo1p@l|1c+rO^zCG3bk2?|%AF;*|4si1XM<`a z1NY0-8$wv?&129!(g_A1lXR!+pD*1*cF?T~e1d6*G1Fz)jcSaZoKpxtA%FNnKP2jo zLXn@OR#1z@6zuH%mMB98}-t zHJqClsZ!G5xMSgIs_=<8sBePXxfoXsuvy`|buON9BX%s-o>OVLA)k3W=wKnw1?so$ zEjm0aS=zu@Xu#;{A)QTjJ$a9_={++ACkRY*sk3jLk&Fu}RxR<-DXR<`5`$VNG*wJE zidM6VzaQ!M0gbQM98@x@;#0qUS8O)p6mrYwTk*;8J~!ovbY6jon^Ki}uggd3#J5G8 z>awvtF85Y<9yE{Iag}J7O7)1O=ylk^255@XmV5J06-{xaaSNASZoTKKp~$tSxdUI~ zU1RZ&UuW37Ro&_ryj^cSt$Jd&pt|+h!A&dwcr&`S=R5E`=6Tm`+(qGm@$YZ8(8@a$ zXfo@Rwtvm7N3RMmVCb7radAs-@QtCXx^CQ-<)V>QPLZy@jH{#dc4#(y zV)6Hp{ZMz!|NG8!>i01gZMy)G<8Hf2X7e&LH_gOaajW<<^Xi55@OnlY*|S|*TS8;u_nHbv7lgmmZ+Q<5 zi!*lLCJmdpyzl(L${$C?(pVo|oR%r~x_B_ocPePa_);27^=n4L=`toZ;xdBut9rSv z?wDQ7j2I3WQBdhz%X7`2YaG_y|wA!7|s?k;A&WNMLMTZEzCaE^d??E&u?f=ejQBR~|< z)=thyP2(p8r6mt?Ad}tXAP_GvF9|P630I;$1cpQ+Ay7C34hK^ZV3H4kjPV8&NP>G5 zKRDEIBrFl{M#j4mfP0)68&?mqJP1S?2mU0djAGTjDV;wZ?6vplNn~3Hn$nP>%!dMi zz@bnC7zzi&k&s{QDWkf&zgrVXKUJjY3Gv3bL0}S4h>OdgEJ$Q^&p-VAr3J}^a*+rz z!jW7(h*+GuCyqcC{MD(Ovj^!{pB^OKUe|uy&bD?CN>KZrf3?v>>l*xSvnQiH-o^ViN$%FRdm9url;%(*jf5H$*S)8;i0xWHdl>$p);nH9v0)YfW?Vz$! zNCeUbi9`NEg(i^57y=fzM@1o*z*Bf6?QCV>2p9}(BLlYsOCfMjFv1pw1mlo)Py{8v zppw{MDfEeWN+n>Ne~oI7%9cU}mz0r3!es2gNF0t5jkGipjIo2lz;-e)7}Ul_#!eDv zw;#>kI>;#-pyfeu3Fsd^2F@6=oh#8r9;A!G0`-mm7%{=S;Ec(bJ=I_`FodKGQVNEY zmXwr4{9*jpDl%4{ggQZ5Ac z%wYTdl*!1c5^)%^E78Q&)ma|27c6j(a=)g4sGrp$r{jv>>M2 z6y)E5|Aooe!PSfKzvKA>`a6pfK3=E8vL14ksP&f=>gOP?}rG6ye@9ZR3 zJF*vsh*P$w390i!FV~~_Hv6t2Zl<4VUi|rNja#boFt{%q~xGb z(2petq9A*_>~B*>?d?Olx^lmYg4)}sH2>G42RE; literal 0 HcmV?d00001 diff --git a/_static/codecov/keybd_open.png b/_static/codecov/keybd_open.png new file mode 100644 index 0000000000000000000000000000000000000000..a8bac6c9de256626c680f9e9e3f8ee81d9713ecd GIT binary patch literal 9003 zcmeHLc{tST+n?-2i>)FxMv|DtSZA{DOOu_57&BjtZI~H*ma;_1l4LIxB9dJQ*|TO# zN!sjLvQy+8>YUSgf9L)E-g8~=``>Y0!#wx%xj*;)e4hJ$zP?Ym-Z>3679JK52*jqP zscJy|%SHXLGSN|g3$@6f1Az_(`xu?47+^iYt|X!@!3h9Uyj=k>;6<(w94t$&Tmv4vUI0Y(72z4p-=52qQm)ibdMG{Lq zK-QAXj0ngGo#r{-=KfvMuhjI#;F3ml_v?vI<2-B3E&Sb83IPcet8E#VcMLMbDBXp( zietxGS0^|mhdOuNU*! z>lxhuyJ~5HC9jEu^6wu9yggaJEILLJFELe{&yOk3uY^_mY(J*EdTA{CbDHru&S*s5 zFHGCrim@r19P**ASiJAew_7dD+e>cSOtls3Z#(>lZx1iINjrV7NNt%PDNcMkXlA*W z`Bs*%ezf4U5NxJm__K5P?GEB7`Q`04T`~MTc=Sf&%qHuFd;!rn3}>8+-@yEidsy4J zwgV$+ymZ>vxo%s!H&}(*({B{M0j#!`Lt5GDbvmkji<_pajk9^n5DO(1Q=&m;TJ!?& z?dIZM5vQ>Gv(&EdlJNx^(v{pFFPfSP@r^ zUhRTD7bv*AYH`?Gq11M%nz2r;gHNp42jVLD`5tDqtqX8m!12pRUB0&T%w5?UN8u2$ z{33ra^&{S8?zu^Udrw+}HTUH(`Hi#oxx_~8z^KjV88Ir*uZL|Sg~!j^L_s$=4bBRW zop?W3)Xm?LO6n3E9KHt6XpGZ_HN~5oyARM_FU(4I%qcBvz8@9K>nRPh&##*Eoh-~w z_nj&&SNa->_^2rmZKKZTTsb8qBi7eZ+<|^m6k%kJZMtc45f~Vd$|>90cV@0+305_? 
z$}Q=5?!3a*rg#60fWtWf!9(Na58NEPqWSacwBi#FiX9R?*v-C&eMqb0k&TM0y0Va% zz~=|oCLbfUU9)b69enmUFXBy2)12vO`bS&kb^YOC0g}4%8d0@NbMm6<9C^4VY$)DE z97dE-HVFOL-)`t{@mQPechUcK@>Nbm7VqtmzZyM5U<`U@;RjksVMF8R*E>VhuI zkJSj=K$J!b9wLT59DZFvicVNQpWLaC2991nDs(piR8YcRq>puA}_3int5bZCnSnDDDBIyC`&DN%_Rawgsxlzfrw!$YU zk697D5ny@b5%eg+G2F&np#M_QkwT<~o z=20^H-;eo=m3|I#91GRY0$TY@>nd$|*Y@6PiI*+2I$KO&NY?@M466>Gt%~Lgowk~^JM_8wk%ghs}g}t}vM}#g;++DAjY#7oR5>!9Zb&%tZ@Av?{`s6b=pUPf& z`Ej0w!tuWT?VOSJ(s^!$)o|_8JY0RAMH30nz=QERTWUx%i6hBP9(PAp{ZQXvk!u}#Vab<|7#n z{maX?O+c&it?=GMZ6-mCiq1b`jrvnH%AIwV(c=)Y+Ng zV<#loBasaSDG>p~!~6DW%DmIwBgLM5kIpGHr(+-C2oq1L_i5|QlNU`n4xG_p4P3X+ zRb3J0k2659ugVF3jbY3g*#hm^+qFWErnuOPd#1_kH{$GKT=$ySdOG<2GJTTZieX8- z?SgdRq&e6K0~#g8LaMO>bF{p3>QU`28P6mcPxd#h%a3HMTriHT*5N2RdHdrvo)Hl( z`U&a1G+qKp7@qqMO*C~Dy@6-;0(yrivn$>oJm|n&YNs2%lFk?#rUv7N=CbY!26_#` zOwy)}i?Rp4nN$r%&5zU9O^|X|`}0gh4dooTajuqYy@fN0lYu~6li4||>k%x%XO;xj z5hh>P?#m$1I$s2gk=e^$N7Mm%F()PB*mBjl8#GTm}V z$n>4H{Zn?>tRb54D4BSNiH}riISvV^~kJ4Oqi-Q}*uV!1arYe1u@i3%->Aj(r zIL(E2nn^nhc3)1$LG?M!Z0P!8{kc7jVZ|z31Z9vW;zWG03+NwSV4)_v?8U zWzJng#k|hYcWf&`>pXSb$1J+|*RC+y0H1PLZGt#e5IB@{-e@rJo$|6ec*b&%(FN6?k>rN1-Nr$ z4m|s8prjrxoFseZy3M8c%nY<;8djgwW?!ntbr_BuPh)z_r$EZ(kbFfHIe-m~a@%)q zLHUZt{_ImXka>hsv7(tXD6IvCnD*Y9=OgFxoLemASErKGmb*^Vr}f(jx0bPl+I)E& zdgR_RtTV3aL1y$Y0L5%R`aCZ_j3{hDnOKUvJ-^B&r*-n!H1{M-gxge|1@AvCd1;LQ z&gyHGB7uzB5-;A*PN28V&l6{zV&ytnvv49kQD;x-Jcw{TPutVpBdI*~r2kQt;9y9} zrm;uL{ueR+pCY~(GsbF5WOLs1yA+{d^Nmfm{aCu^(uKBHuPP3>NOHZQeGCtO_(B6)e%e38$iS+A2@EuwaM3TExzF}i&|u$ zKssx-vZFF{(!fLzv#fm`hUWZG5W_HwZrHcibZGYIaTr8bF#XA~Yf^ke%h&0u3Dx%! z^ibu!hA$rmFDYFLiIR1*I%r`O?aUXua(z?Y&59c);yYe5&auIz#2%m$bF*Hyeb18q z{s%|D-an(}lltLeI1PH%zkvDJwfC);yKU+wq>Y~}`Wh1~1YKy!?;AbZMc?c-xx!ID zGU@t4XMu&;EzIlDe3)0mJ*~+gZ-I|7lWVH7XtQ^*7s@OAG%rXhF&W2i7^~4ZIjANP z)iqZodK~wkV=H<3sb9XbJmqa^_fu6Md2TL+@V@LjyB!gdKL)fcuy|X!v>b{(24;h6 zJWY9Lv8*x1KY;xnwHPyvsDJ@ za=nD?=lf8HdL|ib^6{~*M~Z^@X6f4_vccD5U;FmpEMP#m#3a{Hv(qAR7jbY4j^jmY1_kGt2jCr9Hcns@ad#dkAiH(87OC%{OL&%A8E67dds4 zUUa(por`Wt!CH3Hh4y+T!9&*HuNopp&DuC!EBsu2>zv#{TDK;p*zGdw3Q}{Qa3l3P z;iD#9LF=sx7%v`;5kM(4uz1BHUXiwju?VgYWB8vDMa+TeebP^R`85D{{ zc$n4X&Z!+bAB>Phr{s{sU9$^T=t{2+HO8<@oNBifmQ0|Km;F^;iwj#gXkI1ur>(!Z zG@-if3==No%Idh?cck)-zRX2RqlFtoV`vrn=qyc?4xL}sirUxBJ4r!#F?aOvj)juB z%{tu=P8ttd5+4}c=Ud{6@wDYv&cB^kki63NIG@ATX%<^s?;CRDcEa1`cD0Wo0dd{Y z6qjdr3O;ft)T>4e(3iLm_u`QvGhKad%P9zU^Lh8<(*A{x4mEG2wo)t&m&#+lvgmgT zX=0eA>sxXaMJ9`9ydOiNS4<9P-1gH31Wp9bo%!tP$g@wsOnW*#!un#WK&N2z$F93% z)7XXFa=YT;W;+I0qF=FN_Dr$}{`Q67WG7Phqm*HvlkJb*IdK?p`G_u_U_TMccM}%Z z9o(j&Lzg2plsL#1uY|kR zlIJvxnYMIcl8WJUtLEWZ=Jc)J-!GUhx*adO`KdDYV3eE|sbm38a(2si#4)I#TQ{ zu?Gg4M4z6{uc>!WZ(Z|4?1_ml(CD!lWvQIf+81z4K0o}Pq{RyyL8J8^KU+axA#4qy zQ_Hf5_NC-tOOi9sMZFnv)U{y8i$_y>bVIjd zYdd_eZZ%qsKW*^;2wxh(DlFXEIM5O>17AA*?E6crapNmn`L!Jn>AqbENHS$!E&q-T zFo+4DLWSrzdaYa`rye_*o~K22kByy4JzG;|#gQ7C@QCI9JkMy#2(2Fr`Ks(a7O@xQ zvrGC5UmLAPFdMG#Z`W+kDtZAXOA0bEMIr=*Q!fa#N06YRqNk;z^4on3^%f>IEv8Vr zL60-Ew)rk(`mRiv3IpS4>4mi@^GxX`R5ew(n60W&Syt}_o>A)pgE5&E8 zx78ULi@iR42{_udvF!_&adC>f`(&?{`S`^G4hsg;xq4oViQ6kITte;T!WM@^_k;-B zLpb!avBKI!QgmoYY?o2a^F?+Z#*eEd9ik7<*Uqk8Z`^Mqt=+4+d1B;xTx-$WS;2+I zO|PLhqWk+I$Zt%YKlF@o9>2ARqq#A@Bb52^a#Z=0)&8LgZP% zvLw7M+CWwPCk1sR2eGG6T+wj2r>7^(lX?k3vV)7EP$)P82}dHKR0Ndl?LxtNL0!lK zI}|@SQ~@%ML~x}Lh%VqAPOJ^logxQ;Q0Kuv$*HqAH7~01XMmmYE64doj z0dOP&Ap=Dqp-2?`SAXg(2J^eO3;CytR6XHdSXa0h3;}m`{*wopqUP~Oyub7y8&U5O z;RXPi=uW}`Y94?KMc~(#>9W6^Y0Fj&pS3( z&1F|tv?>wjz7teSRSvR~FB(t85%B2UuQo^=N&+ci3&lwQc&G#dB?U#Ha9F4<9xr7h zBPD@Dps>GCX}ORoSQi|yLq#Qr5vV*UoEQ=zjTM7RN}ch1}Yr4mQkNTZ}}B%l(~;?mS?Yyqf^gft3@K-mCDtb{mq zUTl|YXCKf?dRlT2Bn~8 
zNJ`0wBY$x>0Z3$OmG6*>Az;WKS>thNbt)y6T5SYptQ`P%b+Oy!-Psp3bv0CFu{+H{ zW!|+@7lT$I0ayx=WJDx7$w79K1@BPq_7qt5XSblw5^=kZyI=sn({MjqP8n+l-yO=r z{~h>Wm<;WSo-Y48oj67^y5TwBJ4^92JfB%Xe{oB{A8>LfZyE$s*XRVaQ0XiJAiuJ z{_M5i?1aClV_U2g4k1M?Txn@MwF0GZQcxRdF#w7}NFk8o(kPUK)Q?*Eot;dyrFddV zfRY`x2B`Z??XBH?2A}#-e!_oF#?v0ysVxLj42qC|iisN`#nA|Hw73Lyh(;hFKeik! z3*R|qe_OKb&N+m^pnnxbcITWzYwc8{p}VWA69FLoS*+iR=YPQc;{UTy|C9T#upizk zL|1QWC)-nWJzf57_`d-DU^q*_0WM_Xzf1jB$PZb5c^FZ1{$Zm&;FtHmOoy*0T=2& zf1cErYE6u!67_|g#zsd&6|{Xdx}%mlVs_OuBZEMDId(pKK*_0xsYXVM7DkP6jBXz- zEd)lyY5I@OKCuXih+u*QN7paQfUw6wG;XcaW~qWCo?T2*0>x(MuCfDKSAqe7lXsSc7qm4=p(o#F8`bgRO G%6|bpD&^7u literal 0 HcmV?d00001 diff --git a/_static/codecov/status.json b/_static/codecov/status.json new file mode 100644 index 00000000..6a646290 --- /dev/null +++ b/_static/codecov/status.json @@ -0,0 +1 @@ +{"note":"This file is an internal implementation detail to speed up HTML report generation. Its format can change at any time. You might be looking for the JSON report: https://coverage.rtfd.io/cmd.html#cmd-json","format":2,"version":"7.4.3","globals":"6593462d53f14965375e9cf252781277","files":{"d_a94113ed293c43ea_benchmark_py":{"hash":"66d3ea60fc4745975c6a9ff6a972ca3c","index":{"nums":[0,1,9,0,0,0,0,0],"html_filename":"d_a94113ed293c43ea_benchmark_py.html","relative_filename":"src/nos/benchmarks/benchmark.py"}},"d_a94113ed293c43ea_transmission_loss_py":{"hash":"a59974e73cacabaa4c35f4fd222621da","index":{"nums":[0,1,6,0,0,0,0,0],"html_filename":"d_a94113ed293c43ea_transmission_loss_py.html","relative_filename":"src/nos/benchmarks/transmission_loss.py"}},"d_db4399fe0ac4c92a_transmssion_loss_py":{"hash":"068227dd51691223354608bdd536dc29","index":{"nums":[0,1,95,0,45,0,0,0],"html_filename":"d_db4399fe0ac4c92a_transmssion_loss_py.html","relative_filename":"src/nos/data/transmssion_loss.py"}},"d_99aac0089c5f15d8_error_metrics_py":{"hash":"a0f369852091b3528c7749559e8af408","index":{"nums":[0,1,21,0,5,0,0,0],"html_filename":"d_99aac0089c5f15d8_error_metrics_py.html","relative_filename":"src/nos/metrics/error_metrics.py"}},"d_99aac0089c5f15d8_metric_py":{"hash":"914233c3e8d365ef18ed0636c3e858f5","index":{"nums":[0,1,9,3,0,0,0,0],"html_filename":"d_99aac0089c5f15d8_metric_py.html","relative_filename":"src/nos/metrics/metric.py"}},"d_99aac0089c5f15d8_operator_metrics_py":{"hash":"9d35ece251dd9b71a0be4938d2fbe518","index":{"nums":[0,1,22,0,0,0,0,0],"html_filename":"d_99aac0089c5f15d8_operator_metrics_py.html","relative_filename":"src/nos/metrics/operator_metrics.py"}},"d_dfcfe80011fec8d7_residual_py":{"hash":"5fcc85baa9089ace6e8c81cb963f63f1","index":{"nums":[0,1,27,0,1,0,0,0],"html_filename":"d_dfcfe80011fec8d7_residual_py.html","relative_filename":"src/nos/networks/residual.py"}},"d_93093ff38057b02c_deep_dot_operator_py":{"hash":"692cb04b156b9d8c41ffcfe540c62d18","index":{"nums":[0,1,28,0,0,0,0,0],"html_filename":"d_93093ff38057b02c_deep_dot_operator_py.html","relative_filename":"src/nos/operators/deep_dot_operator.py"}},"d_93093ff38057b02c_deep_root_operator_py":{"hash":"97c465ecc7fdec595784c740eeb1f1e7","index":{"nums":[0,1,35,0,0,0,0,0],"html_filename":"d_93093ff38057b02c_deep_root_operator_py.html","relative_filename":"src/nos/operators/deep_root_operator.py"}},"d_93093ff38057b02c_mean_stack_neural_operator_py":{"hash":"eeecdca3cb89f250f502d28b53fd8c40","index":{"nums":[0,1,18,0,0,0,0,0],"html_filename":"d_93093ff38057b02c_mean_stack_neural_operator_py.html","relative_filename":"src/nos/operators/mean_stack_neural_operator.py"}},"d_e3bc015ca2131802_average_metric_py":{"hash":"a2bb
7bc8d0ac8f140cccc0072564c6e5","index":{"nums":[0,1,21,0,21,0,0,0],"html_filename":"d_e3bc015ca2131802_average_metric_py.html","relative_filename":"src/nos/trainers/average_metric.py"}},"d_e3bc015ca2131802_trainer_py":{"hash":"f3c9369713c5ef25fe50ff80a11dc30a","index":{"nums":[0,1,84,0,84,0,0,0],"html_filename":"d_e3bc015ca2131802_trainer_py.html","relative_filename":"src/nos/trainers/trainer.py"}},"d_040e78c194e497af_scaling_py":{"hash":"461f9fc00b95981ae83bf656edd72bcf","index":{"nums":[0,1,9,0,1,0,0,0],"html_filename":"d_040e78c194e497af_scaling_py.html","relative_filename":"src/nos/transforms/scaling.py"}}}} \ No newline at end of file diff --git a/_static/codecov/style.css b/_static/codecov/style.css new file mode 100644 index 00000000..2555fdfe --- /dev/null +++ b/_static/codecov/style.css @@ -0,0 +1,309 @@ +@charset "UTF-8"; +/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */ +/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */ +/* Don't edit this .css file. Edit the .scss file instead! */ +html, body, h1, h2, h3, p, table, td, th { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; } + +body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; font-size: 1em; background: #fff; color: #000; } + +@media (prefers-color-scheme: dark) { body { background: #1e1e1e; } } + +@media (prefers-color-scheme: dark) { body { color: #eee; } } + +html > body { font-size: 16px; } + +a:active, a:focus { outline: 2px dashed #007acc; } + +p { font-size: .875em; line-height: 1.4em; } + +table { border-collapse: collapse; } + +td { vertical-align: top; } + +table tr.hidden { display: none !important; } + +p#no_rows { display: none; font-size: 1.2em; } + +a.nav { text-decoration: none; color: inherit; } + +a.nav:hover { text-decoration: underline; color: inherit; } + +.hidden { display: none; } + +header { background: #f8f8f8; width: 100%; z-index: 2; border-bottom: 1px solid #ccc; } + +@media (prefers-color-scheme: dark) { header { background: black; } } + +@media (prefers-color-scheme: dark) { header { border-color: #333; } } + +header .content { padding: 1rem 3.5rem; } + +header h2 { margin-top: .5em; font-size: 1em; } + +header p.text { margin: .5em 0 -.5em; color: #666; font-style: italic; } + +@media (prefers-color-scheme: dark) { header p.text { color: #aaa; } } + +header.sticky { position: fixed; left: 0; right: 0; height: 2.5em; } + +header.sticky .text { display: none; } + +header.sticky h1, header.sticky h2 { font-size: 1em; margin-top: 0; display: inline-block; } + +header.sticky .content { padding: 0.5rem 3.5rem; } + +header.sticky .content p { font-size: 1em; } + +header.sticky ~ #source { padding-top: 6.5em; } + +main { position: relative; z-index: 1; } + +footer { margin: 1rem 3.5rem; } + +footer .content { padding: 0; color: #666; font-style: italic; } + +@media (prefers-color-scheme: dark) { footer .content { color: #aaa; } } + +#index { margin: 1rem 0 0 3.5rem; } + +h1 { font-size: 1.25em; display: inline-block; } + +#filter_container { float: right; margin: 0 2em 0 0; } + +#filter_container input { width: 10em; padding: 0.2em 0.5em; border: 2px solid #ccc; background: #fff; color: #000; } + +@media (prefers-color-scheme: dark) { #filter_container input { border-color: #444; } } + +@media (prefers-color-scheme: dark) { #filter_container input { background: #1e1e1e; } } + 
+@media (prefers-color-scheme: dark) { #filter_container input { color: #eee; } } + +#filter_container input:focus { border-color: #007acc; } + +header button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; color: inherit; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: #ccc; } + +@media (prefers-color-scheme: dark) { header button { border-color: #444; } } + +header button:active, header button:focus { outline: 2px dashed #007acc; } + +header button.run { background: #eeffee; } + +@media (prefers-color-scheme: dark) { header button.run { background: #373d29; } } + +header button.run.show_run { background: #dfd; border: 2px solid #00dd00; margin: 0 .1em; } + +@media (prefers-color-scheme: dark) { header button.run.show_run { background: #373d29; } } + +header button.mis { background: #ffeeee; } + +@media (prefers-color-scheme: dark) { header button.mis { background: #4b1818; } } + +header button.mis.show_mis { background: #fdd; border: 2px solid #ff0000; margin: 0 .1em; } + +@media (prefers-color-scheme: dark) { header button.mis.show_mis { background: #4b1818; } } + +header button.exc { background: #f7f7f7; } + +@media (prefers-color-scheme: dark) { header button.exc { background: #333; } } + +header button.exc.show_exc { background: #eee; border: 2px solid #808080; margin: 0 .1em; } + +@media (prefers-color-scheme: dark) { header button.exc.show_exc { background: #333; } } + +header button.par { background: #ffffd5; } + +@media (prefers-color-scheme: dark) { header button.par { background: #650; } } + +header button.par.show_par { background: #ffa; border: 2px solid #bbbb00; margin: 0 .1em; } + +@media (prefers-color-scheme: dark) { header button.par.show_par { background: #650; } } + +#help_panel, #source p .annotate.long { display: none; position: absolute; z-index: 999; background: #ffffcc; border: 1px solid #888; border-radius: .2em; color: #333; padding: .25em .5em; } + +#source p .annotate.long { white-space: normal; float: right; top: 1.75em; right: 1em; height: auto; } + +#help_panel_wrapper { float: right; position: relative; } + +#keyboard_icon { margin: 5px; } + +#help_panel_state { display: none; } + +#help_panel { top: 25px; right: 0; padding: .75em; border: 1px solid #883; color: #333; } + +#help_panel .keyhelp p { margin-top: .75em; } + +#help_panel .legend { font-style: italic; margin-bottom: 1em; } + +.indexfile #help_panel { width: 25em; } + +.pyfile #help_panel { width: 18em; } + +#help_panel_state:checked ~ #help_panel { display: block; } + +kbd { border: 1px solid black; border-color: #888 #333 #333 #888; padding: .1em .35em; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-weight: bold; background: #eee; border-radius: 3px; } + +#source { padding: 1em 0 1em 3.5rem; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; } + +#source p { position: relative; white-space: pre; } + +#source p * { box-sizing: border-box; } + +#source p .n { float: left; text-align: right; width: 3.5rem; box-sizing: border-box; margin-left: -3.5rem; padding-right: 1em; color: #999; user-select: none; } + +@media (prefers-color-scheme: dark) { #source p .n { color: #777; } } + +#source p .n.highlight { background: #ffdd00; } + +#source p .n a { margin-top: -4em; padding-top: 4em; text-decoration: none; color: #999; } + +@media (prefers-color-scheme: dark) { #source p .n a { color: #777; } } + +#source p .n a:hover { text-decoration: underline; color: #999; } + +@media (prefers-color-scheme: 
dark) { #source p .n a:hover { color: #777; } } + +#source p .t { display: inline-block; width: 100%; box-sizing: border-box; margin-left: -.5em; padding-left: 0.3em; border-left: 0.2em solid #fff; } + +@media (prefers-color-scheme: dark) { #source p .t { border-color: #1e1e1e; } } + +#source p .t:hover { background: #f2f2f2; } + +@media (prefers-color-scheme: dark) { #source p .t:hover { background: #282828; } } + +#source p .t:hover ~ .r .annotate.long { display: block; } + +#source p .t .com { color: #008000; font-style: italic; line-height: 1px; } + +@media (prefers-color-scheme: dark) { #source p .t .com { color: #6a9955; } } + +#source p .t .key { font-weight: bold; line-height: 1px; } + +#source p .t .str { color: #0451a5; } + +@media (prefers-color-scheme: dark) { #source p .t .str { color: #9cdcfe; } } + +#source p.mis .t { border-left: 0.2em solid #ff0000; } + +#source p.mis.show_mis .t { background: #fdd; } + +@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t { background: #4b1818; } } + +#source p.mis.show_mis .t:hover { background: #f2d2d2; } + +@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t:hover { background: #532323; } } + +#source p.run .t { border-left: 0.2em solid #00dd00; } + +#source p.run.show_run .t { background: #dfd; } + +@media (prefers-color-scheme: dark) { #source p.run.show_run .t { background: #373d29; } } + +#source p.run.show_run .t:hover { background: #d2f2d2; } + +@media (prefers-color-scheme: dark) { #source p.run.show_run .t:hover { background: #404633; } } + +#source p.exc .t { border-left: 0.2em solid #808080; } + +#source p.exc.show_exc .t { background: #eee; } + +@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t { background: #333; } } + +#source p.exc.show_exc .t:hover { background: #e2e2e2; } + +@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t:hover { background: #3c3c3c; } } + +#source p.par .t { border-left: 0.2em solid #bbbb00; } + +#source p.par.show_par .t { background: #ffa; } + +@media (prefers-color-scheme: dark) { #source p.par.show_par .t { background: #650; } } + +#source p.par.show_par .t:hover { background: #f2f2a2; } + +@media (prefers-color-scheme: dark) { #source p.par.show_par .t:hover { background: #6d5d0c; } } + +#source p .r { position: absolute; top: 0; right: 2.5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } + +#source p .annotate { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; color: #666; padding-right: .5em; } + +@media (prefers-color-scheme: dark) { #source p .annotate { color: #ddd; } } + +#source p .annotate.short:hover ~ .long { display: block; } + +#source p .annotate.long { width: 30em; right: 2.5em; } + +#source p input { display: none; } + +#source p input ~ .r label.ctx { cursor: pointer; border-radius: .25em; } + +#source p input ~ .r label.ctx::before { content: "▶ "; } + +#source p input ~ .r label.ctx:hover { background: #e8f4ff; color: #666; } + +@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { background: #0f3a42; } } + +@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { color: #aaa; } } + +#source p input:checked ~ .r label.ctx { background: #d0e8ff; color: #666; border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; } + +@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { background: #056; } } + +@media 
(prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { color: #aaa; } } + +#source p input:checked ~ .r label.ctx::before { content: "▼ "; } + +#source p input:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; } + +#source p label.ctx { color: #999; display: inline-block; padding: 0 .5em; font-size: .8333em; } + +@media (prefers-color-scheme: dark) { #source p label.ctx { color: #777; } } + +#source p .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; white-space: nowrap; background: #d0e8ff; border-radius: .25em; margin-right: 1.75em; text-align: right; } + +@media (prefers-color-scheme: dark) { #source p .ctxs { background: #056; } } + +#index { font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 0.875em; } + +#index table.index { margin-left: -.5em; } + +#index td, #index th { text-align: right; width: 5em; padding: .25em .5em; border-bottom: 1px solid #eee; } + +@media (prefers-color-scheme: dark) { #index td, #index th { border-color: #333; } } + +#index td.name, #index th.name { text-align: left; width: auto; } + +#index th { font-style: italic; color: #333; cursor: pointer; } + +@media (prefers-color-scheme: dark) { #index th { color: #ddd; } } + +#index th:hover { background: #eee; } + +@media (prefers-color-scheme: dark) { #index th:hover { background: #333; } } + +#index th[aria-sort="ascending"], #index th[aria-sort="descending"] { white-space: nowrap; background: #eee; padding-left: .5em; } + +@media (prefers-color-scheme: dark) { #index th[aria-sort="ascending"], #index th[aria-sort="descending"] { background: #333; } } + +#index th[aria-sort="ascending"]::after { font-family: sans-serif; content: " ↑"; } + +#index th[aria-sort="descending"]::after { font-family: sans-serif; content: " ↓"; } + +#index td.name a { text-decoration: none; color: inherit; } + +#index tr.total td, #index tr.total_dynamic td { font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; } + +#index tr.file:hover { background: #eee; } + +@media (prefers-color-scheme: dark) { #index tr.file:hover { background: #333; } } + +#index tr.file:hover td.name { text-decoration: underline; color: inherit; } + +#scroll_marker { position: fixed; z-index: 3; right: 0; top: 0; width: 16px; height: 100%; background: #fff; border-left: 1px solid #eee; will-change: transform; } + +@media (prefers-color-scheme: dark) { #scroll_marker { background: #1e1e1e; } } + +@media (prefers-color-scheme: dark) { #scroll_marker { border-color: #333; } } + +#scroll_marker .marker { background: #ccc; position: absolute; min-height: 3px; width: 100%; } + +@media (prefers-color-scheme: dark) { #scroll_marker .marker { background: #444; } } diff --git a/_static/custom.css b/_static/custom.css new file mode 100644 index 00000000..2a924f1d --- /dev/null +++ b/_static/custom.css @@ -0,0 +1 @@ +/* This file intentionally left blank. */ diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 00000000..d06a71d7 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } 
+ } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 00000000..7e4c114f --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..a858a410e4faa62ce324d814e4b816fff83a6fb3 GIT binary patch literal 286 zcmV+(0pb3MP)s`hMrGg#P~ix$^RISR_I47Y|r1 z_CyJOe}D1){SET-^Amu_i71Lt6eYfZjRyw@I6OQAIXXHDfiX^GbOlHe=Ae4>0m)d(f|Me07*qoM6N<$f}vM^LjV8( literal 0 HcmV?d00001 diff --git a/_static/language_data.js b/_static/language_data.js new file mode 100644 index 00000000..250f5665 --- /dev/null +++ b/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 0000000000000000000000000000000000000000..d96755fdaf8bb2214971e0db9c1fd3077d7c419d GIT binary patch literal 90 zcmeAS@N?(olHy`uVBq!ia0vp^+#t*WBp7;*Yy1LIik>cxAr*|t7R?Mi>2?kWtu=nj kDsEF_5m^0CR;1wuP-*O&G^0G}KYk!hp00i_>zopr08q^qX#fBK literal 0 HcmV?d00001 diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 0000000000000000000000000000000000000000..7107cec93a979b9a5f64843235a16651d563ce2d GIT binary patch literal 90 zcmeAS@N?(olHy`uVBq!ia0vp^+#t*WBp7;*Yy1LIik>cxAr*|t7R?Mi>2?kWtu>-2 m3q%Vub%g%s<8sJhVPMczOq}xhg9DJoz~JfX=d#Wzp$Pyb1r*Kz literal 0 HcmV?d00001 diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 00000000..04a41742 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,84 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; 
background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #8f5902; font-style: italic } /* Comment */ +.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */ +.highlight .g { color: #000000 } /* Generic */ +.highlight .k { color: #004461; font-weight: bold } /* Keyword */ +.highlight .l { color: #000000 } /* Literal */ +.highlight .n { color: #000000 } /* Name */ +.highlight .o { color: #582800 } /* Operator */ +.highlight .x { color: #000000 } /* Other */ +.highlight .p { color: #000000; font-weight: bold } /* Punctuation */ +.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #8f5902 } /* Comment.Preproc */ +.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #a40000 } /* Generic.Deleted */ +.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ +.highlight .ges { color: #000000 } /* Generic.EmphStrong */ +.highlight .gr { color: #ef2929 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #888888 } /* Generic.Output */ +.highlight .gp { color: #745334 } /* Generic.Prompt */ +.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */ +.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */ +.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */ +.highlight .ld { color: #000000 } /* Literal.Date */ +.highlight .m { color: #990000 } /* Literal.Number */ +.highlight .s { color: #4e9a06 } /* Literal.String */ +.highlight .na { color: #c4a000 } /* Name.Attribute */ +.highlight .nb { color: #004461 } /* Name.Builtin */ +.highlight .nc { color: #000000 } /* Name.Class */ +.highlight .no { color: #000000 } /* Name.Constant */ +.highlight .nd { color: #888888 } /* Name.Decorator */ +.highlight .ni { color: #ce5c00 } /* Name.Entity */ +.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #000000 } /* Name.Function */ +.highlight .nl { color: #f57900 } /* Name.Label */ +.highlight .nn { color: #000000 } /* Name.Namespace */ +.highlight .nx { color: #000000 } /* Name.Other */ +.highlight .py { color: #000000 } /* Name.Property */ +.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: 
#000000 } /* Name.Variable */ +.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */ +.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */ +.highlight .w { color: #f8f8f8 } /* Text.Whitespace */ +.highlight .mb { color: #990000 } /* Literal.Number.Bin */ +.highlight .mf { color: #990000 } /* Literal.Number.Float */ +.highlight .mh { color: #990000 } /* Literal.Number.Hex */ +.highlight .mi { color: #990000 } /* Literal.Number.Integer */ +.highlight .mo { color: #990000 } /* Literal.Number.Oct */ +.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */ +.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */ +.highlight .sc { color: #4e9a06 } /* Literal.String.Char */ +.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */ +.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */ +.highlight .se { color: #4e9a06 } /* Literal.String.Escape */ +.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */ +.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */ +.highlight .sx { color: #4e9a06 } /* Literal.String.Other */ +.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */ +.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */ +.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */ +.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #000000 } /* Name.Function.Magic */ +.highlight .vc { color: #000000 } /* Name.Variable.Class */ +.highlight .vg { color: #000000 } /* Name.Variable.Global */ +.highlight .vi { color: #000000 } /* Name.Variable.Instance */ +.highlight .vm { color: #000000 } /* Name.Variable.Magic */ +.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 00000000..7918c3fa --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. 
+ * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && 
!terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 00000000..8a96c69a --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/coverage.html b/coverage.html new file mode 100644 index 00000000..524be95d --- /dev/null +++ b/coverage.html @@ -0,0 +1,112 @@ + + + + + + + + Code Coverage — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

Code Coverage

+
+

For a detailed report on code coverage, click here.

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/examples.html b/examples.html new file mode 100644 index 00000000..2c2dba67 --- /dev/null +++ b/examples.html @@ -0,0 +1,107 @@ + + + + + + + + Examples — Neural Operators documentation + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

Examples

+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/genindex.html b/genindex.html new file mode 100644 index 00000000..8d7f21ee --- /dev/null +++ b/genindex.html @@ -0,0 +1,489 @@ + + + + + + + Index — Neural Operators documentation + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ + +

Index

+ +
+ _ + | B + | D + | F + | L + | M + | N + | R + | S + | T + +
+

_

+ + +
+ +

B

+ + +
+ +

D

+ + + +
+ +

F

+ + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

N

+ + + +
    +
  • + nos + +
  • +
  • + nos.benchmarks + +
  • +
  • + nos.benchmarks.benchmark + +
  • +
  • + nos.benchmarks.transmission_loss + +
  • +
  • + nos.data + +
  • +
  • + nos.data.transmssion_loss + +
  • +
  • + nos.metrics + +
  • +
  • + nos.metrics.error_metrics + +
  • +
  • + nos.metrics.metric + +
  • +
  • + nos.metrics.operator_metrics + +
  • +
+ +

R

+ + + +
+ +

S

+ + +
+ +

T

+ + + +
+ + + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 00000000..a47e5309 --- /dev/null +++ b/index.html @@ -0,0 +1,133 @@ + + + + + + + + Neural Operators for Helmholtz Equation — Neural Operators documentation + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

Neural Operators for Helmholtz Equation

+

Welcome to the documentation for the Neural Operators for Helmholtz Equation project! This project focuses on the application of advanced neural operator techniques to solve the Helmholtz equation in various parameterized geometries. The approach integrates deep learning with physical modeling of domains.

+
+

Contents

+ +
+
+

Introduction

+

The Helmholtz equation is a fundamental partial differential equation in physics, particularly in acoustics, electromagnetics, and quantum mechanics. In this project, we develop and apply neural operators, a class of deep learning models, to solve the Helmholtz equation across different parameterized geometries. This approach aims to overcome the limitations of traditional numerical methods by offering speed and flexibility for complex geometries.

+
+
+

API Reference

+

The nos section offers detailed descriptions of the functions, classes, and methods available in this project.

+
+
+

Examples

+

Explore practical applications and see the neural operators in action in the Examples section.

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/modules.html b/modules.html new file mode 100644 index 00000000..d94f134e --- /dev/null +++ b/modules.html @@ -0,0 +1,171 @@ + + + + + + + + nos — Neural Operators documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/nos.benchmarks.html b/nos.benchmarks.html new file mode 100644 index 00000000..e36848bf --- /dev/null +++ b/nos.benchmarks.html @@ -0,0 +1,170 @@ + + + + + + + + nos.benchmarks package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

nos.benchmarks package

+
+

Submodules

+
+
+

nos.benchmarks.benchmark module

+
+
+class nos.benchmarks.benchmark.Benchmark(train_set: continuity.data.dataset.OperatorDataset, test_set: continuity.data.dataset.OperatorDataset, metrics: List[nos.metrics.metric.Metric] = <factory>)
+

Bases: object

+
+
+metrics: List[Metric]
+
+ +
+
+test_set: OperatorDataset
+
+ +
+
+train_set: OperatorDataset
+
+ +
+ +
+
+

nos.benchmarks.transmission_loss module

+
+
+

Module contents

+
+
+class nos.benchmarks.Benchmark(train_set: continuity.data.dataset.OperatorDataset, test_set: continuity.data.dataset.OperatorDataset, metrics: List[nos.metrics.metric.Metric] = <factory>)
+

Bases: object

+
+
+metrics: List[Metric]
+
+ +
+
+test_set: OperatorDataset
+
+ +
+
+train_set: OperatorDataset
+
+ +
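A hedged usage sketch of the Benchmark container documented above (not taken from the project's code): it assumes that two OperatorDataset instances named train_set and test_set already exist (for example the transmission-loss datasets from nos.data) and that the listed metrics take no constructor arguments, as their signatures on the nos.metrics page suggest.

from nos.benchmarks import Benchmark
from nos.metrics import L1Error, MSError, NumberOfParameters

# train_set and test_set are assumed to be continuity OperatorDataset instances;
# their construction is shown on the nos.data page.
benchmark = Benchmark(
    train_set=train_set,
    test_set=test_set,
    metrics=[L1Error(), MSError(), NumberOfParameters()],
)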
+ +
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/nos.data.html b/nos.data.html new file mode 100644 index 00000000..2a829850 --- /dev/null +++ b/nos.data.html @@ -0,0 +1,165 @@ + + + + + + + + nos.data package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

nos.data package

+
+

Submodules

+
+
+

nos.data.transmssion_loss module

+
+
+class nos.data.transmssion_loss.TLDataset(csv_file: Path)
+

Bases: OperatorDataset

+
+ +
+
+class nos.data.transmssion_loss.TLDatasetCompact(path: Path, n_samples: int = -1)
+

Bases: OperatorDataset

+

Transmission loss dataset with a larger evaluation space.

+
+ +
+
+class nos.data.transmssion_loss.TLDatasetCompactExp(path: Path, n_samples: int = -1)
+

Bases: OperatorDataset

+

Transmission loss dataset with a larger evaluation space.

+
+ +
+
+

Module contents

+
+
+class nos.data.TLDataset(csv_file: Path)
+

Bases: OperatorDataset

+
+ +
+
+class nos.data.TLDatasetCompact(path: Path, n_samples: int = -1)
+

Bases: OperatorDataset

+

Transmission loss dataset with a larger evaluation space.

+
+ +
+
+class nos.data.TLDatasetCompactExp(path: Path, n_samples: int = -1)
+

Bases: OperatorDataset

+

Transmission loss dataset with a larger evaluation space.

+
+ +
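As a hedged illustration of the dataset classes documented above (not taken from the project's code): the paths are placeholders, and reading n_samples=-1 as "use all samples" is an assumption based only on the default value.

from pathlib import Path
from nos.data import TLDataset, TLDatasetCompact

# Placeholder paths; point these at real transmission-loss data.
dataset = TLDatasetCompact(Path("data/transmission_loss"), n_samples=256)
csv_dataset = TLDataset(Path("data/transmission_loss.csv"))

# Both classes derive from continuity's OperatorDataset; the unpacking below
# assumes a sample is an (x, u, y, v) tuple, as in continuity.
x, u, y, v = dataset[0]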
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/nos.html b/nos.html new file mode 100644 index 00000000..5db442a6 --- /dev/null +++ b/nos.html @@ -0,0 +1,283 @@ + + + + + + + + nos package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

nos package

+
+

Subpackages

+
+ +
+
+
+

Module contents

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/nos.metrics.html b/nos.metrics.html new file mode 100644 index 00000000..1331c5e1 --- /dev/null +++ b/nos.metrics.html @@ -0,0 +1,250 @@ + + + + + + + + nos.metrics package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

nos.metrics package

+
+

Submodules

+
+
+

nos.metrics.error_metrics module

+
+
+class nos.metrics.error_metrics.L1Error
+

Bases: Loss

+

L1 error metric (Mean Absolute Error).

+
+ +
+
+class nos.metrics.error_metrics.Loss(name: str, loss)
+

Bases: Metric

+

Class for evaluating error metrics.

+
+
Parameters:
+
    +
  • name – The name of the metric.

  • +
  • loss – The loss function for calculating the metric.

  • +
+
+
+
+ +
+
+class nos.metrics.error_metrics.MSError
+

Bases: Loss

+

Mean square error metric (L2 Error).

+
+ +
+
+

nos.metrics.metric module

+
+
+class nos.metrics.metric.Metric(name: str)
+

Bases: ABC

+

Base class for all metrics.

+
+
+abstract __call__(operator: Operator, dataset: OperatorDataset) Dict
+

Evaluates the metric.

+
+
Parameters:
+
    +
  • operator – operator for which the metric is evaluated.

  • +
  • dataset – dataset on which the metric is evaluated.

  • +
+
+
Returns:
+

dict containing the results of the metric; the keys “value” and “unit” should be included in the dict.

+
+
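A hedged sketch of a custom Metric subclass (not taken from the project's code), based only on the abstract __call__ signature above and the note that the returned dict should contain the keys "value" and "unit". The class name MaxAbsError and the assumption that the dataset yields (x, u, y, v) samples are illustrative only.

from typing import Dict

import torch
from nos.metrics import Metric


class MaxAbsError(Metric):
    """Hypothetical metric: largest absolute error over a whole dataset."""

    def __init__(self):
        super().__init__("MaxAbsError")

    def __call__(self, operator, dataset) -> Dict:
        errors = []
        for i in range(len(dataset)):
            # Assumption: samples are (x, u, y, v) tuples without a batch dimension.
            x, u, y, v = dataset[i]
            prediction = operator(x.unsqueeze(0), u.unsqueeze(0), y.unsqueeze(0))
            errors.append((prediction.squeeze(0) - v).abs().max())
        return {"value": torch.stack(errors).max().item(), "unit": "[-]"}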
+
+ +
+ +
+
+

nos.metrics.operator_metrics module

+
+
+class nos.metrics.operator_metrics.NumberOfParameters
+

Bases: Metric

+

Number of parameters in the operator.

+
+ +
+
+class nos.metrics.operator_metrics.SpeedOfEvaluation
+

Bases: Metric

+

Speed of a single evaluation in milliseconds.

+
+ +
+
+

Module contents

+
+
+class nos.metrics.L1Error
+

Bases: Loss

+

L1 error metric (Mean Absolute Error).

+
+ +
+
+class nos.metrics.MSError
+

Bases: Loss

+

Mean square error metric (L2 Error).

+
+ +
+
+class nos.metrics.Metric(name: str)
+

Bases: ABC

+

Base class for all metrics.

+
+
+abstract __call__(operator: Operator, dataset: OperatorDataset) Dict
+

Evaluates the metric.

+
+
Parameters:
+
    +
  • operator – operator for which the metric is evaluated.

  • +
  • dataset – dataset on which the metric is evaluated.

  • +
+
+
Returns:
+

dict containing the results of the metric; the keys “value” and “unit” should be included in the dict.

+
+
+
+ +
+ +
+
+class nos.metrics.NumberOfParameters
+

Bases: Metric

+

Number of parameters in the operator.

+
+ +
+
+class nos.metrics.SpeedOfEvaluation
+

Bases: Metric

+

Speed of a single evaluation in milliseconds.

+
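A hedged end-to-end sketch of applying the metrics above (not taken from the project's code): operator and dataset are assumed to already exist, for example a trained operator from nos.operators and a dataset from nos.data.

from nos.metrics import L1Error, MSError, NumberOfParameters, SpeedOfEvaluation

# `operator` and `dataset` are assumed to exist (see nos.operators and nos.data).
for metric in (L1Error(), MSError(), NumberOfParameters(), SpeedOfEvaluation()):
    result = metric(operator, dataset)
    # Each metric returns a dict that should contain the keys "value" and "unit".
    print(type(metric).__name__, result["value"], result["unit"])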
+ +
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/nos.networks.html b/nos.networks.html new file mode 100644 index 00000000..55625dc9 --- /dev/null +++ b/nos.networks.html @@ -0,0 +1,205 @@ + + + + + + + + nos.networks package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

nos.networks package

+
+

Submodules

+
+
+

nos.networks.residual module

+
+
+class nos.networks.residual.ResBlock(width: int, depth: int, act: Module)
+

Bases: Module

+
+
+forward(x: Tensor)
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within this function, one should call the Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

+
+
+ +
+ +
+
+class nos.networks.residual.ResNet(width: int, depth: int, act: Module, stride: int = 1, transition_transformations: List[Module] = None)
+

Bases: Module

+
+
+forward(x: Tensor)
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within this function, one should call the Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

+
+
+ +
+ +
+
+

Module contents

+
+
+class nos.networks.ResBlock(width: int, depth: int, act: Module)
+

Bases: Module

+
+
+forward(x: Tensor)
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within this function, one should call the Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

+
+
+ +
+ +
+
+class nos.networks.ResNet(width: int, depth: int, act: Module, stride: int = 1, transition_transformations: List[Module] = None)
+

Bases: Module

+
+
+forward(x: Tensor)
+

Defines the computation performed at every call.

+

Should be overridden by all subclasses.

+
+

Note

+

Although the recipe for forward pass needs to be defined within this function, one should call the Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

+
+
+ +
+ +
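As a hedged illustration of the residual building blocks documented above (not taken from the project's code): the width, depth, and activation are arbitrary example values, and the assumptions that an activation instance is expected and that the input feature dimension equals width are inferred, not documented.

import torch
from torch import nn
from nos.networks import ResNet

net = ResNet(width=64, depth=5, act=nn.Tanh())  # example configuration (act instance assumed)
x = torch.rand(8, 64)   # assumption: the last dimension matches `width`
y = net(x)              # assumption: residual blocks keep the feature dimension unchanged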
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/nos.operators.html b/nos.operators.html new file mode 100644 index 00000000..d5519141 --- /dev/null +++ b/nos.operators.html @@ -0,0 +1,297 @@ + + + + + + + + nos.operators package — Neural Operators documentation + + + + + + + + + + + + + + + + + + + +
nos.operators package

Submodules

nos.operators.deep_dot_operator module

class nos.operators.deep_dot_operator.DeepDotOperator(shapes: ~continuity.operators.shape.OperatorShapes, branch_width: int = 16, branch_depth: int = 2, trunk_width: int = 16, trunk_depth: int = 2, dot_width: int = 16, dot_depth: int = 2, act: ~torch.nn.modules.module.Module = <class 'torch.nn.modules.activation.Tanh'>, stride: int = 1)

Bases: Operator

forward(x: Tensor, u: Tensor, y: Tensor) → Tensor

Forward pass through the operator.

Parameters:
  • x – Sensor positions of shape (batch_size, num_sensors, x_dim)
  • u – Input function values of shape (batch_size, num_sensors, u_dim)
  • y – Evaluation coordinates of shape (batch_size, num_evaluations, y_dim)

Returns:
Evaluations of the mapped function with shape (batch_size, num_evaluations, v_dim)
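The branch/trunk/dot constructor arguments suggest a DeepONet-style decomposition: a branch network encodes the sampled input function, a trunk network encodes each evaluation coordinate, and the two are combined by a dot-product head. The following is a hypothetical, heavily simplified sketch of that idea together with the documented x/u/y shape contract (the same contract applies to DeepRootOperator and MeanStackNeuralOperator below); it is an illustration under these assumptions, not the package's actual implementation.

```python
import torch
from torch import nn

batch_size, num_sensors, num_evaluations = 4, 32, 10
x_dim, u_dim, y_dim, dot_width = 1, 1, 1, 16

x = torch.randn(batch_size, num_sensors, x_dim)      # sensor positions
u = torch.randn(batch_size, num_sensors, u_dim)      # input function values at the sensors
y = torch.randn(batch_size, num_evaluations, y_dim)  # evaluation coordinates

# Branch: encode the sampled input function (x, u) into one latent vector per sample.
branch = nn.Sequential(nn.Flatten(), nn.Linear(num_sensors * (x_dim + u_dim), dot_width), nn.Tanh())
# Trunk: encode each evaluation coordinate into a latent vector.
trunk = nn.Sequential(nn.Linear(y_dim, dot_width), nn.Tanh())

b = branch(torch.cat([x, u], dim=-1))                # (batch_size, dot_width)
t = trunk(y)                                         # (batch_size, num_evaluations, dot_width)
v = torch.einsum("bd,bed->be", b, t).unsqueeze(-1)   # dot product -> (batch_size, num_evaluations, v_dim=1)
assert v.shape == (batch_size, num_evaluations, 1)
```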
nos.operators.deep_root_operator module

class nos.operators.deep_root_operator.DeepRootOperator(shapes: ~continuity.operators.shape.OperatorShapes, root_width: int = 16, root_depth: int = 1, branch_width: int = 16, branch_depth: int = 2, trunk_width: int = 16, trunk_depth: int = 2, dot_width: int = 16, dot_depth: int = 2, act: ~torch.nn.modules.module.Module = <class 'torch.nn.modules.activation.Tanh'>, stride: int = 1)

Bases: Operator

forward(x: Tensor, u: Tensor, y: Tensor) → Tensor

Forward pass through the operator.

Parameters:
  • x – Sensor positions of shape (batch_size, num_sensors, x_dim)
  • u – Input function values of shape (batch_size, num_sensors, u_dim)
  • y – Evaluation coordinates of shape (batch_size, num_evaluations, y_dim)

Returns:
Evaluations of the mapped function with shape (batch_size, num_evaluations, v_dim)
nos.operators.mean_stack_neural_operator module

continuity.operators.deep_neural_operator

The Deep Neural Operator architecture.

class nos.operators.mean_stack_neural_operator.MeanStackNeuralOperator(shapes: OperatorShapes, width: int = 32, depth: int = 3)

Bases: Operator

The MeanStackNeuralOperator class integrates a deep residual network within a neural operator framework. It uses all scalar values of the input locations, input functions, and individual evaluation points as inputs for a deep residual network.

Parameters:
  • shapes – An instance of DatasetShapes.
  • width – The width of the Deep Residual Network, defining the number of neurons in each hidden layer.
  • depth – The depth of the Deep Residual Network, indicating the number of hidden layers in the network.

forward(x: Tensor, u: Tensor, y: Tensor) → Tensor

Forward pass through the operator.

Performs the forward pass through the operator, processing the input function values u and input function probe locations x by flattening them. They are then expanded to match the dimensions of the evaluation coordinates y. The preprocessed x, preprocessed u, and y are stacked and passed through a deep residual network.

Parameters:
  • x – Input coordinates of shape (batch_size, #sensors, x_dim), representing the points in space at which the input function values are probed.
  • u – Input function values of shape (batch_size, #sensors, u_dim), representing the values of the input functions at different sensor locations.
  • y – Evaluation coordinates of shape (batch_size, #evaluations, y_dim), representing the points in space at which the output function values are to be computed.

Returns:
The output of the operator, of shape (batch_size, #evaluations, v_dim), representing the computed function values at the specified evaluation coordinates.
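The flatten/expand/stack preprocessing described above can be illustrated directly with tensor operations. This is a hypothetical, simplified sketch of that preprocessing followed by a plain MLP standing in for the deep residual network; it is not the package's actual forward implementation, and all variable names are assumptions.

```python
import torch
from torch import nn

batch_size, num_sensors, num_evaluations = 4, 32, 10
x_dim, u_dim, y_dim, width = 1, 1, 1, 32

x = torch.randn(batch_size, num_sensors, x_dim)
u = torch.randn(batch_size, num_sensors, u_dim)
y = torch.randn(batch_size, num_evaluations, y_dim)

# Flatten the sensor axis of x and u into one feature vector per sample ...
x_flat = x.reshape(batch_size, -1)                   # (batch_size, num_sensors * x_dim)
u_flat = u.reshape(batch_size, -1)                   # (batch_size, num_sensors * u_dim)
# ... then expand it to every evaluation coordinate before stacking with y.
x_exp = x_flat.unsqueeze(1).expand(-1, num_evaluations, -1)
u_exp = u_flat.unsqueeze(1).expand(-1, num_evaluations, -1)
features = torch.cat([x_exp, u_exp, y], dim=-1)      # (batch_size, num_evaluations, feature_dim)

# Stand-in for the deep residual network that maps the stacked features to v_dim outputs.
net = nn.Sequential(nn.Linear(features.shape[-1], width), nn.Tanh(), nn.Linear(width, 1))
v = net(features)
assert v.shape == (batch_size, num_evaluations, 1)
```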
Module contents

class nos.operators.DeepDotOperator(shapes: ~continuity.operators.shape.OperatorShapes, branch_width: int = 16, branch_depth: int = 2, trunk_width: int = 16, trunk_depth: int = 2, dot_width: int = 16, dot_depth: int = 2, act: ~torch.nn.modules.module.Module = <class 'torch.nn.modules.activation.Tanh'>, stride: int = 1)

Bases: Operator

forward(x: Tensor, u: Tensor, y: Tensor) → Tensor

Forward pass through the operator.

Parameters:
  • x – Sensor positions of shape (batch_size, num_sensors, x_dim)
  • u – Input function values of shape (batch_size, num_sensors, u_dim)
  • y – Evaluation coordinates of shape (batch_size, num_evaluations, y_dim)

Returns:
Evaluations of the mapped function with shape (batch_size, num_evaluations, v_dim)

class nos.operators.MeanStackNeuralOperator(shapes: OperatorShapes, width: int = 32, depth: int = 3)

Bases: Operator

The MeanStackNeuralOperator class integrates a deep residual network within a neural operator framework. It uses all scalar values of the input locations, input functions, and individual evaluation points as inputs for a deep residual network.

Parameters:
  • shapes – An instance of DatasetShapes.
  • width – The width of the Deep Residual Network, defining the number of neurons in each hidden layer.
  • depth – The depth of the Deep Residual Network, indicating the number of hidden layers in the network.

forward(x: Tensor, u: Tensor, y: Tensor) → Tensor

Forward pass through the operator.

Performs the forward pass through the operator, processing the input function values u and input function probe locations x by flattening them. They are then expanded to match the dimensions of the evaluation coordinates y. The preprocessed x, preprocessed u, and y are stacked and passed through a deep residual network.

Parameters:
  • x – Input coordinates of shape (batch_size, #sensors, x_dim), representing the points in space at which the input function values are probed.
  • u – Input function values of shape (batch_size, #sensors, u_dim), representing the values of the input functions at different sensor locations.
  • y – Evaluation coordinates of shape (batch_size, #evaluations, y_dim), representing the points in space at which the output function values are to be computed.

Returns:
The output of the operator, of shape (batch_size, #evaluations, v_dim), representing the computed function values at the specified evaluation coordinates.
\ No newline at end of file
diff --git a/nos.trainers.html b/nos.trainers.html
new file mode 100644
index 00000000..9004bf54
--- /dev/null
+++ b/nos.trainers.html
@@ -0,0 +1,128 @@
nos.trainers package — Neural Operators documentation
nos.trainers package

Submodules

nos.trainers.average_metric module

nos.trainers.trainer module

Module contents
\ No newline at end of file
diff --git a/nos.transforms.html b/nos.transforms.html
new file mode 100644
index 00000000..26e8d476
--- /dev/null
+++ b/nos.transforms.html
@@ -0,0 +1,165 @@
nos.transforms package — Neural Operators documentation
nos.transforms package

Submodules

nos.transforms.scaling module

class nos.transforms.scaling.MinMaxScale(min_value: Tensor, max_value: Tensor)

Bases: Transform

forward(tensor: Tensor) → Tensor

Applies the transformation.

Parameters:
tensor – Tensor that should be transformed.

Returns:
Transformed tensor.
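For reference, a minimal sketch of min-max scaling with the documented min_value/max_value arguments is shown below, assuming the conventional mapping to the interval [0, 1]; the package's MinMaxScale may use a different target range or base class, so treat the class name and formula here as illustrative assumptions only.

```python
import torch
from torch import nn


class SimpleMinMaxScale(nn.Module):
    """Illustrative min-max scaling to [0, 1] given known per-feature bounds."""

    def __init__(self, min_value: torch.Tensor, max_value: torch.Tensor):
        super().__init__()
        self.min_value = min_value
        self.max_value = max_value

    def forward(self, tensor: torch.Tensor) -> torch.Tensor:
        # Shift by the minimum and divide by the range so min maps to 0 and max maps to 1.
        return (tensor - self.min_value) / (self.max_value - self.min_value)


scale = SimpleMinMaxScale(min_value=torch.tensor(0.0), max_value=torch.tensor(10.0))
print(scale(torch.tensor([0.0, 5.0, 10.0])))  # tensor([0.0000, 0.5000, 1.0000])
```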
Module contents

class nos.transforms.MinMaxScale(min_value: Tensor, max_value: Tensor)

Bases: Transform

forward(tensor: Tensor) → Tensor

Applies the transformation.

Parameters:
tensor – Tensor that should be transformed.

Returns:
Transformed tensor.
+ + + + + + + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 0000000000000000000000000000000000000000..803dc370c5681566a9392bca40561925e5835261 GIT binary patch literal 912 zcmV;B18@8zAX9K?X>NERX>N99Zgg*Qc_4OWa&u{KZXhxWBOp+6Z)#;@bUGkTWp#34 zY#>i?WpZJ3Z*p@ABOq2~a&u{KZaN?eBOp|0Wgv28ZDDC{WMy(7Z)PBLXlZjGW@&6? zAZc?TV{dJ6a%FRKWn>_Ab7^j8AbM|z2_?|ZLf*8*B;wO+LfB7 zYW9vCm^LWJW^JcU_v_bS47Py~>|P3;nfL5@GmycQ2~>XF6)ZauKtB;>Vs(%O`JpY# z8?;wa?fxVoghzsqi2xZ- z#Boj|nD(|Tfzr0OQ6dkpM9MwH7%BW9Wn>8yo{AzDB4;!qOw8``3`^X@RL0Th)t{$F zb!ql&7pj8|$3L*5#AzsOeJ-y=YEdp|aydUAK0D!Yf_Vx|@?0f}^&B`NH1A}kO!7s1Aaagd*Iu*7tFD;0T;8@-jx8^_HF%v z$q7qUo>^$9YzR3SE+e$g>kBfkGL0 zsA4^QY#w16dgz&(Fg@Gvw$?MKV&{DXC?A>JHU@w99?c|vbpMk?8^ClYae91)6UQWY zLnA!l8_QINNycQttStp;-D8SYf08jWZO*G*Z?-{Ku;PBS>y zp#<-NRKdoogMU}Ii!Q8Tv?6rFp>6v^r4m5DfD{Kt23SX5m$AC)aT!^62&{?^1{83f8?KtStRvAfCq0sCQ)}(U_s0T z?4j(@5;7PcKVJ+Lz?Aa^LW;N9@x;oYYnd4q<iS(&FiF3V7EH* mUp@_q)gBXl^YW!0B + + + + + + Python Module Index — Neural Operators documentation + + + + + + + + + + + + + + + + + + + + +
Python Module Index

n

nos
    nos.benchmarks
    nos.benchmarks.benchmark
    nos.benchmarks.transmission_loss
    nos.data
    nos.data.transmssion_loss
    nos.metrics
    nos.metrics.error_metrics
    nos.metrics.metric
    nos.metrics.operator_metrics
    nos.networks
    nos.networks.residual
    nos.operators
    nos.operators.deep_dot_operator
    nos.operators.deep_root_operator
    nos.operators.mean_stack_neural_operator
    nos.transforms
    nos.transforms.scaling
\ No newline at end of file
diff --git a/search.html b/search.html
new file mode 100644
index 00000000..195c7f05
--- /dev/null
+++ b/search.html
@@ -0,0 +1,123 @@
Search — Neural Operators documentation
Search

Searching for multiple words only shows matches that contain all words.
+ + + + + + + \ No newline at end of file diff --git a/searchindex.js b/searchindex.js new file mode 100644 index 00000000..623790bb --- /dev/null +++ b/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["coverage", "examples", "index", "modules", "nos", "nos.benchmarks", "nos.data", "nos.metrics", "nos.networks", "nos.operators", "nos.trainers", "nos.transforms"], "filenames": ["coverage.rst", "examples.rst", "index.rst", "modules.rst", "nos.rst", "nos.benchmarks.rst", "nos.data.rst", "nos.metrics.rst", "nos.networks.rst", "nos.operators.rst", "nos.trainers.rst", "nos.transforms.rst"], "titles": ["Code Coverage", "Examples", "Neural Operators for Helmholtz Equation", "nos", "nos package", "nos.benchmarks package", "nos.data package", "nos.metrics package", "nos.networks package", "nos.operators package", "nos.trainers package", "nos.transforms package"], "terms": {"For": 0, "an": [0, 9], "detail": [0, 2], "report": 0, "click": 0, "here": 0, "welcom": 2, "document": 2, "project": 2, "thi": [2, 8], "focus": 2, "applic": 2, "advanc": 2, "techniqu": 2, "solv": 2, "variou": 2, "parameter": 2, "geometri": 2, "The": [2, 7, 9], "approach": 2, "integr": [2, 9], "deep": [2, 9], "learn": 2, "physic": 2, "model": 2, "domain": 2, "code": 2, "coverag": 2, "nos": 2, "packag": [2, 3], "i": [2, 7], "fundament": 2, "partial": 2, "differenti": 2, "particularli": 2, "field": 2, "acoust": 2, "electromagnet": 2, "quantum": 2, "mechan": 2, "In": 2, "we": 2, "develop": 2, "util": 2, "form": 2, "across": 2, "differ": [2, 9], "aim": 2, "overcom": 2, "limit": 2, "tradit": 2, "numer": 2, "method": 2, "offer": 2, "speed": [2, 7], "flexibl": 2, "complex": 2, "section": 2, "descript": 2, "function": [2, 7, 8, 9], "class": [2, 5, 6, 7, 8, 9, 11], "avail": 2, "explor": 2, "practic": 2, "see": 2, "action": 2, "subpackag": 3, "benchmark": [3, 4], "submodul": [3, 4], "modul": 3, "transmission_loss": [3, 4], "content": 3, "data": [3, 4, 5], "transmssion_loss": [3, 4], "metric": [3, 4, 5], "error_metr": [3, 4], "operator_metr": [3, 4], "network": [3, 4, 9], "residu": [3, 4, 9], "oper": [3, 4, 7], "deep_dot_oper": [3, 4], "deep_root_oper": [3, 4], "mean_stack_neural_oper": [3, 4], "trainer": [3, 4], "average_metr": [3, 4], "transform": [3, 4], "scale": [3, 4], "test_set": [4, 5], "train_set": [4, 5], "tldataset": [4, 6], "tldatasetcompact": [4, 6], "tldatasetcompactexp": [4, 6], "l1error": [4, 7], "loss": [4, 6, 7], "mserror": [4, 7], "__call__": [4, 7], "numberofparamet": [4, 7], "speedofevalu": [4, 7], "resblock": [4, 8], "forward": [4, 8, 9, 11], "resnet": [4, 8], "deepdotoper": [4, 9], "deeprootoper": [4, 9], "meanstackneuraloper": [4, 9], "minmaxscal": [4, 11], "continu": [5, 9], "dataset": [5, 6, 7], "operatordataset": [5, 6, 7], "list": [5, 8], "factori": 5, "base": [5, 6, 7, 8, 9, 11], "object": 5, "csv_file": 6, "path": 6, "n_sampl": 6, "int": [6, 8, 9], "1": [6, 8, 9], "transmiss": 6, "bigger": 6, "evalu": [6, 7, 9], "space": [6, 9], "l1": 7, "error": 7, "mean": 7, "absolut": 7, "name": 7, "str": 7, "paramet": [7, 9, 11], "calcul": 7, "squar": 7, "l2": 7, "abc": 7, "all": [7, 8, 9], "abstract": 7, "dict": 7, "which": [7, 9], "return": [7, 9, 11], "contain": 7, "result": 7, "kei": 7, "valu": [7, 9], "unit": 7, "should": [7, 8, 11], "number": [7, 9], "singl": 7, "millisecond": 7, "width": [8, 9], "depth": [8, 9], "act": [8, 9], "x": [8, 9], "tensor": [8, 9, 11], "defin": [8, 9], "comput": [8, 9], "perform": [8, 9], "everi": 8, "call": 8, "overridden": 8, "subclass": 8, "although": 8, "recip": 8, "pass": [8, 
9], "need": 8, "within": [8, 9], "one": 8, "instanc": [8, 9], "afterward": 8, "instead": 8, "sinc": 8, "former": 8, "take": 8, "care": 8, "run": 8, "regist": 8, "hook": 8, "while": 8, "latter": 8, "silent": 8, "ignor": 8, "them": [8, 9], "stride": [8, 9], "transition_transform": 8, "none": 8, "shape": 9, "operatorshap": 9, "branch_width": 9, "16": 9, "branch_depth": 9, "2": 9, "trunk_width": 9, "trunk_depth": 9, "dot_width": 9, "dot_depth": 9, "torch": 9, "nn": 9, "activ": 9, "tanh": 9, "u": 9, "y": 9, "through": 9, "sensor": 9, "posit": 9, "batch_siz": 9, "num_sensor": 9, "x_dim": 9, "input": 9, "u_dim": 9, "coordin": 9, "num_evalu": 9, "y_dim": 9, "map": 9, "v_dim": 9, "root_width": 9, "root_depth": 9, "deep_neural_oper": 9, "neural": 9, "architectur": 9, "32": 9, "3": 9, "framework": 9, "It": 9, "us": 9, "scalar": 9, "locat": 9, "individu": 9, "point": 9, "datasetshap": 9, "neuron": 9, "each": 9, "hidden": 9, "layer": 9, "indic": 9, "process": 9, "probe": 9, "flatten": 9, "thei": 9, "ar": 9, "expand": 9, "match": 9, "dimens": 9, "preprocess": 9, "stack": 9, "repres": 9, "output": 9, "specifi": 9, "min_valu": 11, "max_valu": 11, "appli": 11}, "objects": {"": [[4, 0, 0, "-", "nos"]], "nos": [[5, 0, 0, "-", "benchmarks"], [6, 0, 0, "-", "data"], [7, 0, 0, "-", "metrics"], [8, 0, 0, "-", "networks"], [9, 0, 0, "-", "operators"], [11, 0, 0, "-", "transforms"]], "nos.benchmarks": [[5, 1, 1, "", "Benchmark"], [5, 0, 0, "-", "benchmark"], [5, 0, 0, "-", "transmission_loss"]], "nos.benchmarks.Benchmark": [[5, 2, 1, "", "metrics"], [5, 2, 1, "", "test_set"], [5, 2, 1, "", "train_set"]], "nos.benchmarks.benchmark": [[5, 1, 1, "", "Benchmark"]], "nos.benchmarks.benchmark.Benchmark": [[5, 2, 1, "", "metrics"], [5, 2, 1, "", "test_set"], [5, 2, 1, "", "train_set"]], "nos.data": [[6, 1, 1, "", "TLDataset"], [6, 1, 1, "", "TLDatasetCompact"], [6, 1, 1, "", "TLDatasetCompactExp"], [6, 0, 0, "-", "transmssion_loss"]], "nos.data.transmssion_loss": [[6, 1, 1, "", "TLDataset"], [6, 1, 1, "", "TLDatasetCompact"], [6, 1, 1, "", "TLDatasetCompactExp"]], "nos.metrics": [[7, 1, 1, "", "L1Error"], [7, 1, 1, "", "MSError"], [7, 1, 1, "", "Metric"], [7, 1, 1, "", "NumberOfParameters"], [7, 1, 1, "", "SpeedOfEvaluation"], [7, 0, 0, "-", "error_metrics"], [7, 0, 0, "-", "metric"], [7, 0, 0, "-", "operator_metrics"]], "nos.metrics.Metric": [[7, 3, 1, "", "__call__"]], "nos.metrics.error_metrics": [[7, 1, 1, "", "L1Error"], [7, 1, 1, "", "Loss"], [7, 1, 1, "", "MSError"]], "nos.metrics.metric": [[7, 1, 1, "", "Metric"]], "nos.metrics.metric.Metric": [[7, 3, 1, "", "__call__"]], "nos.metrics.operator_metrics": [[7, 1, 1, "", "NumberOfParameters"], [7, 1, 1, "", "SpeedOfEvaluation"]], "nos.networks": [[8, 1, 1, "", "ResBlock"], [8, 1, 1, "", "ResNet"], [8, 0, 0, "-", "residual"]], "nos.networks.ResBlock": [[8, 3, 1, "", "forward"]], "nos.networks.ResNet": [[8, 3, 1, "", "forward"]], "nos.networks.residual": [[8, 1, 1, "", "ResBlock"], [8, 1, 1, "", "ResNet"]], "nos.networks.residual.ResBlock": [[8, 3, 1, "", "forward"]], "nos.networks.residual.ResNet": [[8, 3, 1, "", "forward"]], "nos.operators": [[9, 1, 1, "", "DeepDotOperator"], [9, 1, 1, "", "MeanStackNeuralOperator"], [9, 0, 0, "-", "deep_dot_operator"], [9, 0, 0, "-", "deep_root_operator"], [9, 0, 0, "-", "mean_stack_neural_operator"]], "nos.operators.DeepDotOperator": [[9, 3, 1, "", "forward"]], "nos.operators.MeanStackNeuralOperator": [[9, 3, 1, "", "forward"]], "nos.operators.deep_dot_operator": [[9, 1, 1, "", "DeepDotOperator"]], 
"nos.operators.deep_dot_operator.DeepDotOperator": [[9, 3, 1, "", "forward"]], "nos.operators.deep_root_operator": [[9, 1, 1, "", "DeepRootOperator"]], "nos.operators.deep_root_operator.DeepRootOperator": [[9, 3, 1, "", "forward"]], "nos.operators.mean_stack_neural_operator": [[9, 1, 1, "", "MeanStackNeuralOperator"]], "nos.operators.mean_stack_neural_operator.MeanStackNeuralOperator": [[9, 3, 1, "", "forward"]], "nos.transforms": [[11, 1, 1, "", "MinMaxScale"], [11, 0, 0, "-", "scaling"]], "nos.transforms.MinMaxScale": [[11, 3, 1, "", "forward"]], "nos.transforms.scaling": [[11, 1, 1, "", "MinMaxScale"]], "nos.transforms.scaling.MinMaxScale": [[11, 3, 1, "", "forward"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"]}, "titleterms": {"code": 0, "coverag": 0, "exampl": [1, 2], "neural": 2, "oper": [2, 9], "helmholtz": 2, "equat": 2, "content": [2, 4, 5, 6, 7, 8, 9, 10, 11], "introduct": 2, "api": 2, "refer": 2, "nos": [3, 4, 5, 6, 7, 8, 9, 10, 11], "packag": [4, 5, 6, 7, 8, 9, 10, 11], "subpackag": 4, "modul": [4, 5, 6, 7, 8, 9, 10, 11], "benchmark": 5, "submodul": [5, 6, 7, 8, 9, 10, 11], "transmission_loss": 5, "data": 6, "transmssion_loss": 6, "metric": 7, "error_metr": 7, "operator_metr": 7, "network": 8, "residu": 8, "deep_dot_oper": 9, "deep_root_oper": 9, "mean_stack_neural_oper": 9, "trainer": 10, "average_metr": 10, "transform": 11, "scale": 11}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx": 60}, "alltitles": {"Code Coverage": [[0, "code-coverage"]], "Examples": [[1, "examples"], [2, "examples"]], "Neural Operators for Helmholtz Equation": [[2, "neural-operators-for-helmholtz-equation"]], "Contents": [[2, "contents"]], "Introduction": [[2, "introduction"]], "API Reference": [[2, "api-reference"]], "nos": [[3, "nos"]], "nos package": [[4, "nos-package"]], "Subpackages": [[4, "subpackages"]], "Module contents": [[4, "module-nos"], [5, "module-nos.benchmarks"], [6, "module-nos.data"], [7, "module-nos.metrics"], [8, "module-nos.networks"], [9, "module-nos.operators"], [10, "module-contents"], [11, "module-nos.transforms"]], "nos.benchmarks package": [[5, "nos-benchmarks-package"]], "Submodules": [[5, "submodules"], [6, "submodules"], [7, "submodules"], [8, "submodules"], [9, "submodules"], [10, "submodules"], [11, "submodules"]], "nos.benchmarks.benchmark module": [[5, "module-nos.benchmarks.benchmark"]], "nos.benchmarks.transmission_loss module": [[5, "module-nos.benchmarks.transmission_loss"]], "nos.data package": [[6, "nos-data-package"]], "nos.data.transmssion_loss module": [[6, "module-nos.data.transmssion_loss"]], "nos.metrics package": [[7, "nos-metrics-package"]], "nos.metrics.error_metrics module": [[7, "module-nos.metrics.error_metrics"]], "nos.metrics.metric module": [[7, "module-nos.metrics.metric"]], "nos.metrics.operator_metrics module": [[7, "module-nos.metrics.operator_metrics"]], "nos.networks package": [[8, "nos-networks-package"]], "nos.networks.residual module": [[8, "module-nos.networks.residual"]], "nos.operators package": [[9, "nos-operators-package"]], "nos.operators.deep_dot_operator module": [[9, 
"module-nos.operators.deep_dot_operator"]], "nos.operators.deep_root_operator module": [[9, "module-nos.operators.deep_root_operator"]], "nos.operators.mean_stack_neural_operator module": [[9, "module-nos.operators.mean_stack_neural_operator"]], "nos.trainers package": [[10, "nos-trainers-package"]], "nos.trainers.average_metric module": [[10, "nos-trainers-average-metric-module"]], "nos.trainers.trainer module": [[10, "nos-trainers-trainer-module"]], "nos.transforms package": [[11, "nos-transforms-package"]], "nos.transforms.scaling module": [[11, "module-nos.transforms.scaling"]]}, "indexentries": {"module": [[4, "module-nos"], [5, "module-nos.benchmarks"], [5, "module-nos.benchmarks.benchmark"], [5, "module-nos.benchmarks.transmission_loss"], [6, "module-nos.data"], [6, "module-nos.data.transmssion_loss"], [7, "module-nos.metrics"], [7, "module-nos.metrics.error_metrics"], [7, "module-nos.metrics.metric"], [7, "module-nos.metrics.operator_metrics"], [8, "module-nos.networks"], [8, "module-nos.networks.residual"], [9, "module-nos.operators"], [9, "module-nos.operators.deep_dot_operator"], [9, "module-nos.operators.deep_root_operator"], [9, "module-nos.operators.mean_stack_neural_operator"], [11, "module-nos.transforms"], [11, "module-nos.transforms.scaling"]], "nos": [[4, "module-nos"]], "benchmark (class in nos.benchmarks)": [[5, "nos.benchmarks.Benchmark"]], "benchmark (class in nos.benchmarks.benchmark)": [[5, "nos.benchmarks.benchmark.Benchmark"]], "metrics (nos.benchmarks.benchmark attribute)": [[5, "nos.benchmarks.Benchmark.metrics"]], "metrics (nos.benchmarks.benchmark.benchmark attribute)": [[5, "nos.benchmarks.benchmark.Benchmark.metrics"]], "nos.benchmarks": [[5, "module-nos.benchmarks"]], "nos.benchmarks.benchmark": [[5, "module-nos.benchmarks.benchmark"]], "nos.benchmarks.transmission_loss": [[5, "module-nos.benchmarks.transmission_loss"]], "test_set (nos.benchmarks.benchmark attribute)": [[5, "nos.benchmarks.Benchmark.test_set"]], "test_set (nos.benchmarks.benchmark.benchmark attribute)": [[5, "nos.benchmarks.benchmark.Benchmark.test_set"]], "train_set (nos.benchmarks.benchmark attribute)": [[5, "nos.benchmarks.Benchmark.train_set"]], "train_set (nos.benchmarks.benchmark.benchmark attribute)": [[5, "nos.benchmarks.benchmark.Benchmark.train_set"]], "tldataset (class in nos.data)": [[6, "nos.data.TLDataset"]], "tldataset (class in nos.data.transmssion_loss)": [[6, "nos.data.transmssion_loss.TLDataset"]], "tldatasetcompact (class in nos.data)": [[6, "nos.data.TLDatasetCompact"]], "tldatasetcompact (class in nos.data.transmssion_loss)": [[6, "nos.data.transmssion_loss.TLDatasetCompact"]], "tldatasetcompactexp (class in nos.data)": [[6, "nos.data.TLDatasetCompactExp"]], "tldatasetcompactexp (class in nos.data.transmssion_loss)": [[6, "nos.data.transmssion_loss.TLDatasetCompactExp"]], "nos.data": [[6, "module-nos.data"]], "nos.data.transmssion_loss": [[6, "module-nos.data.transmssion_loss"]], "l1error (class in nos.metrics)": [[7, "nos.metrics.L1Error"]], "l1error (class in nos.metrics.error_metrics)": [[7, "nos.metrics.error_metrics.L1Error"]], "loss (class in nos.metrics.error_metrics)": [[7, "nos.metrics.error_metrics.Loss"]], "mserror (class in nos.metrics)": [[7, "nos.metrics.MSError"]], "mserror (class in nos.metrics.error_metrics)": [[7, "nos.metrics.error_metrics.MSError"]], "metric (class in nos.metrics)": [[7, "nos.metrics.Metric"]], "metric (class in nos.metrics.metric)": [[7, "nos.metrics.metric.Metric"]], "numberofparameters (class in nos.metrics)": [[7, 
"nos.metrics.NumberOfParameters"]], "numberofparameters (class in nos.metrics.operator_metrics)": [[7, "nos.metrics.operator_metrics.NumberOfParameters"]], "speedofevaluation (class in nos.metrics)": [[7, "nos.metrics.SpeedOfEvaluation"]], "speedofevaluation (class in nos.metrics.operator_metrics)": [[7, "nos.metrics.operator_metrics.SpeedOfEvaluation"]], "__call__() (nos.metrics.metric method)": [[7, "nos.metrics.Metric.__call__"]], "__call__() (nos.metrics.metric.metric method)": [[7, "nos.metrics.metric.Metric.__call__"]], "nos.metrics": [[7, "module-nos.metrics"]], "nos.metrics.error_metrics": [[7, "module-nos.metrics.error_metrics"]], "nos.metrics.metric": [[7, "module-nos.metrics.metric"]], "nos.metrics.operator_metrics": [[7, "module-nos.metrics.operator_metrics"]], "resblock (class in nos.networks)": [[8, "nos.networks.ResBlock"]], "resblock (class in nos.networks.residual)": [[8, "nos.networks.residual.ResBlock"]], "resnet (class in nos.networks)": [[8, "nos.networks.ResNet"]], "resnet (class in nos.networks.residual)": [[8, "nos.networks.residual.ResNet"]], "forward() (nos.networks.resblock method)": [[8, "nos.networks.ResBlock.forward"]], "forward() (nos.networks.resnet method)": [[8, "nos.networks.ResNet.forward"]], "forward() (nos.networks.residual.resblock method)": [[8, "nos.networks.residual.ResBlock.forward"]], "forward() (nos.networks.residual.resnet method)": [[8, "nos.networks.residual.ResNet.forward"]], "nos.networks": [[8, "module-nos.networks"]], "nos.networks.residual": [[8, "module-nos.networks.residual"]], "deepdotoperator (class in nos.operators)": [[9, "nos.operators.DeepDotOperator"]], "deepdotoperator (class in nos.operators.deep_dot_operator)": [[9, "nos.operators.deep_dot_operator.DeepDotOperator"]], "deeprootoperator (class in nos.operators.deep_root_operator)": [[9, "nos.operators.deep_root_operator.DeepRootOperator"]], "meanstackneuraloperator (class in nos.operators)": [[9, "nos.operators.MeanStackNeuralOperator"]], "meanstackneuraloperator (class in nos.operators.mean_stack_neural_operator)": [[9, "nos.operators.mean_stack_neural_operator.MeanStackNeuralOperator"]], "forward() (nos.operators.deepdotoperator method)": [[9, "nos.operators.DeepDotOperator.forward"]], "forward() (nos.operators.meanstackneuraloperator method)": [[9, "nos.operators.MeanStackNeuralOperator.forward"]], "forward() (nos.operators.deep_dot_operator.deepdotoperator method)": [[9, "nos.operators.deep_dot_operator.DeepDotOperator.forward"]], "forward() (nos.operators.deep_root_operator.deeprootoperator method)": [[9, "nos.operators.deep_root_operator.DeepRootOperator.forward"]], "forward() (nos.operators.mean_stack_neural_operator.meanstackneuraloperator method)": [[9, "nos.operators.mean_stack_neural_operator.MeanStackNeuralOperator.forward"]], "nos.operators": [[9, "module-nos.operators"]], "nos.operators.deep_dot_operator": [[9, "module-nos.operators.deep_dot_operator"]], "nos.operators.deep_root_operator": [[9, "module-nos.operators.deep_root_operator"]], "nos.operators.mean_stack_neural_operator": [[9, "module-nos.operators.mean_stack_neural_operator"]], "minmaxscale (class in nos.transforms)": [[11, "nos.transforms.MinMaxScale"]], "minmaxscale (class in nos.transforms.scaling)": [[11, "nos.transforms.scaling.MinMaxScale"]], "forward() (nos.transforms.minmaxscale method)": [[11, "nos.transforms.MinMaxScale.forward"]], "forward() (nos.transforms.scaling.minmaxscale method)": [[11, "nos.transforms.scaling.MinMaxScale.forward"]], "nos.transforms": [[11, "module-nos.transforms"]], 
"nos.transforms.scaling": [[11, "module-nos.transforms.scaling"]]}}) \ No newline at end of file