From 34ee63de3759840d3b3f8e2196c691044c822ed0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Thu, 11 Jul 2024 23:33:37 +0200 Subject: [PATCH 01/26] rename package to gstools_cython --- src/{gstools => gstools_cython}/__init__.py | 0 src/{gstools => gstools_cython}/config.py | 0 src/{gstools => gstools_cython}/covmodel/__init__.py | 0 src/{gstools => gstools_cython}/covmodel/base.py | 0 src/{gstools => gstools_cython}/covmodel/fit.py | 0 src/{gstools => gstools_cython}/covmodel/models.py | 0 src/{gstools => gstools_cython}/covmodel/plot.py | 0 src/{gstools => gstools_cython}/covmodel/tools.py | 0 src/{gstools => gstools_cython}/covmodel/tpl_models.py | 0 src/{gstools => gstools_cython}/field/__init__.py | 0 src/{gstools => gstools_cython}/field/base.py | 0 src/{gstools => gstools_cython}/field/cond_srf.py | 0 src/{gstools => gstools_cython}/field/generator.py | 0 src/{gstools => gstools_cython}/field/plot.py | 0 src/{gstools => gstools_cython}/field/srf.py | 0 src/{gstools => gstools_cython}/field/summator.pyx | 0 src/{gstools => gstools_cython}/field/tools.py | 0 src/{gstools => gstools_cython}/field/upscaling.py | 0 src/{gstools => gstools_cython}/krige/__init__.py | 0 src/{gstools => gstools_cython}/krige/base.py | 0 src/{gstools => gstools_cython}/krige/krigesum.pyx | 0 src/{gstools => gstools_cython}/krige/methods.py | 0 src/{gstools => gstools_cython}/krige/tools.py | 0 src/{gstools => gstools_cython}/normalizer/__init__.py | 0 src/{gstools => gstools_cython}/normalizer/base.py | 0 src/{gstools => gstools_cython}/normalizer/methods.py | 0 src/{gstools => gstools_cython}/normalizer/tools.py | 0 src/{gstools => gstools_cython}/random/__init__.py | 0 src/{gstools => gstools_cython}/random/rng.py | 0 src/{gstools => gstools_cython}/random/tools.py | 0 src/{gstools => gstools_cython}/tools/__init__.py | 0 src/{gstools => gstools_cython}/tools/export.py | 0 src/{gstools => gstools_cython}/tools/geometric.py | 0 src/{gstools => gstools_cython}/tools/misc.py | 0 src/{gstools => gstools_cython}/tools/special.py | 0 src/{gstools => gstools_cython}/transform/__init__.py | 0 src/{gstools => gstools_cython}/transform/array.py | 0 src/{gstools => gstools_cython}/transform/field.py | 0 src/{gstools => gstools_cython}/variogram/__init__.py | 0 src/{gstools => gstools_cython}/variogram/binning.py | 0 src/{gstools => gstools_cython}/variogram/estimator.pyx | 0 src/{gstools => gstools_cython}/variogram/variogram.py | 0 42 files changed, 0 insertions(+), 0 deletions(-) rename src/{gstools => gstools_cython}/__init__.py (100%) rename src/{gstools => gstools_cython}/config.py (100%) rename src/{gstools => gstools_cython}/covmodel/__init__.py (100%) rename src/{gstools => gstools_cython}/covmodel/base.py (100%) rename src/{gstools => gstools_cython}/covmodel/fit.py (100%) rename src/{gstools => gstools_cython}/covmodel/models.py (100%) rename src/{gstools => gstools_cython}/covmodel/plot.py (100%) rename src/{gstools => gstools_cython}/covmodel/tools.py (100%) rename src/{gstools => gstools_cython}/covmodel/tpl_models.py (100%) rename src/{gstools => gstools_cython}/field/__init__.py (100%) rename src/{gstools => gstools_cython}/field/base.py (100%) rename src/{gstools => gstools_cython}/field/cond_srf.py (100%) rename src/{gstools => gstools_cython}/field/generator.py (100%) rename src/{gstools => gstools_cython}/field/plot.py (100%) rename src/{gstools => gstools_cython}/field/srf.py (100%) rename src/{gstools => gstools_cython}/field/summator.pyx (100%) rename src/{gstools => 
gstools_cython}/field/tools.py (100%) rename src/{gstools => gstools_cython}/field/upscaling.py (100%) rename src/{gstools => gstools_cython}/krige/__init__.py (100%) rename src/{gstools => gstools_cython}/krige/base.py (100%) rename src/{gstools => gstools_cython}/krige/krigesum.pyx (100%) rename src/{gstools => gstools_cython}/krige/methods.py (100%) rename src/{gstools => gstools_cython}/krige/tools.py (100%) rename src/{gstools => gstools_cython}/normalizer/__init__.py (100%) rename src/{gstools => gstools_cython}/normalizer/base.py (100%) rename src/{gstools => gstools_cython}/normalizer/methods.py (100%) rename src/{gstools => gstools_cython}/normalizer/tools.py (100%) rename src/{gstools => gstools_cython}/random/__init__.py (100%) rename src/{gstools => gstools_cython}/random/rng.py (100%) rename src/{gstools => gstools_cython}/random/tools.py (100%) rename src/{gstools => gstools_cython}/tools/__init__.py (100%) rename src/{gstools => gstools_cython}/tools/export.py (100%) rename src/{gstools => gstools_cython}/tools/geometric.py (100%) rename src/{gstools => gstools_cython}/tools/misc.py (100%) rename src/{gstools => gstools_cython}/tools/special.py (100%) rename src/{gstools => gstools_cython}/transform/__init__.py (100%) rename src/{gstools => gstools_cython}/transform/array.py (100%) rename src/{gstools => gstools_cython}/transform/field.py (100%) rename src/{gstools => gstools_cython}/variogram/__init__.py (100%) rename src/{gstools => gstools_cython}/variogram/binning.py (100%) rename src/{gstools => gstools_cython}/variogram/estimator.pyx (100%) rename src/{gstools => gstools_cython}/variogram/variogram.py (100%) diff --git a/src/gstools/__init__.py b/src/gstools_cython/__init__.py similarity index 100% rename from src/gstools/__init__.py rename to src/gstools_cython/__init__.py diff --git a/src/gstools/config.py b/src/gstools_cython/config.py similarity index 100% rename from src/gstools/config.py rename to src/gstools_cython/config.py diff --git a/src/gstools/covmodel/__init__.py b/src/gstools_cython/covmodel/__init__.py similarity index 100% rename from src/gstools/covmodel/__init__.py rename to src/gstools_cython/covmodel/__init__.py diff --git a/src/gstools/covmodel/base.py b/src/gstools_cython/covmodel/base.py similarity index 100% rename from src/gstools/covmodel/base.py rename to src/gstools_cython/covmodel/base.py diff --git a/src/gstools/covmodel/fit.py b/src/gstools_cython/covmodel/fit.py similarity index 100% rename from src/gstools/covmodel/fit.py rename to src/gstools_cython/covmodel/fit.py diff --git a/src/gstools/covmodel/models.py b/src/gstools_cython/covmodel/models.py similarity index 100% rename from src/gstools/covmodel/models.py rename to src/gstools_cython/covmodel/models.py diff --git a/src/gstools/covmodel/plot.py b/src/gstools_cython/covmodel/plot.py similarity index 100% rename from src/gstools/covmodel/plot.py rename to src/gstools_cython/covmodel/plot.py diff --git a/src/gstools/covmodel/tools.py b/src/gstools_cython/covmodel/tools.py similarity index 100% rename from src/gstools/covmodel/tools.py rename to src/gstools_cython/covmodel/tools.py diff --git a/src/gstools/covmodel/tpl_models.py b/src/gstools_cython/covmodel/tpl_models.py similarity index 100% rename from src/gstools/covmodel/tpl_models.py rename to src/gstools_cython/covmodel/tpl_models.py diff --git a/src/gstools/field/__init__.py b/src/gstools_cython/field/__init__.py similarity index 100% rename from src/gstools/field/__init__.py rename to src/gstools_cython/field/__init__.py 
diff --git a/src/gstools/field/base.py b/src/gstools_cython/field/base.py similarity index 100% rename from src/gstools/field/base.py rename to src/gstools_cython/field/base.py diff --git a/src/gstools/field/cond_srf.py b/src/gstools_cython/field/cond_srf.py similarity index 100% rename from src/gstools/field/cond_srf.py rename to src/gstools_cython/field/cond_srf.py diff --git a/src/gstools/field/generator.py b/src/gstools_cython/field/generator.py similarity index 100% rename from src/gstools/field/generator.py rename to src/gstools_cython/field/generator.py diff --git a/src/gstools/field/plot.py b/src/gstools_cython/field/plot.py similarity index 100% rename from src/gstools/field/plot.py rename to src/gstools_cython/field/plot.py diff --git a/src/gstools/field/srf.py b/src/gstools_cython/field/srf.py similarity index 100% rename from src/gstools/field/srf.py rename to src/gstools_cython/field/srf.py diff --git a/src/gstools/field/summator.pyx b/src/gstools_cython/field/summator.pyx similarity index 100% rename from src/gstools/field/summator.pyx rename to src/gstools_cython/field/summator.pyx diff --git a/src/gstools/field/tools.py b/src/gstools_cython/field/tools.py similarity index 100% rename from src/gstools/field/tools.py rename to src/gstools_cython/field/tools.py diff --git a/src/gstools/field/upscaling.py b/src/gstools_cython/field/upscaling.py similarity index 100% rename from src/gstools/field/upscaling.py rename to src/gstools_cython/field/upscaling.py diff --git a/src/gstools/krige/__init__.py b/src/gstools_cython/krige/__init__.py similarity index 100% rename from src/gstools/krige/__init__.py rename to src/gstools_cython/krige/__init__.py diff --git a/src/gstools/krige/base.py b/src/gstools_cython/krige/base.py similarity index 100% rename from src/gstools/krige/base.py rename to src/gstools_cython/krige/base.py diff --git a/src/gstools/krige/krigesum.pyx b/src/gstools_cython/krige/krigesum.pyx similarity index 100% rename from src/gstools/krige/krigesum.pyx rename to src/gstools_cython/krige/krigesum.pyx diff --git a/src/gstools/krige/methods.py b/src/gstools_cython/krige/methods.py similarity index 100% rename from src/gstools/krige/methods.py rename to src/gstools_cython/krige/methods.py diff --git a/src/gstools/krige/tools.py b/src/gstools_cython/krige/tools.py similarity index 100% rename from src/gstools/krige/tools.py rename to src/gstools_cython/krige/tools.py diff --git a/src/gstools/normalizer/__init__.py b/src/gstools_cython/normalizer/__init__.py similarity index 100% rename from src/gstools/normalizer/__init__.py rename to src/gstools_cython/normalizer/__init__.py diff --git a/src/gstools/normalizer/base.py b/src/gstools_cython/normalizer/base.py similarity index 100% rename from src/gstools/normalizer/base.py rename to src/gstools_cython/normalizer/base.py diff --git a/src/gstools/normalizer/methods.py b/src/gstools_cython/normalizer/methods.py similarity index 100% rename from src/gstools/normalizer/methods.py rename to src/gstools_cython/normalizer/methods.py diff --git a/src/gstools/normalizer/tools.py b/src/gstools_cython/normalizer/tools.py similarity index 100% rename from src/gstools/normalizer/tools.py rename to src/gstools_cython/normalizer/tools.py diff --git a/src/gstools/random/__init__.py b/src/gstools_cython/random/__init__.py similarity index 100% rename from src/gstools/random/__init__.py rename to src/gstools_cython/random/__init__.py diff --git a/src/gstools/random/rng.py b/src/gstools_cython/random/rng.py similarity index 100% rename from 
src/gstools/random/rng.py rename to src/gstools_cython/random/rng.py diff --git a/src/gstools/random/tools.py b/src/gstools_cython/random/tools.py similarity index 100% rename from src/gstools/random/tools.py rename to src/gstools_cython/random/tools.py diff --git a/src/gstools/tools/__init__.py b/src/gstools_cython/tools/__init__.py similarity index 100% rename from src/gstools/tools/__init__.py rename to src/gstools_cython/tools/__init__.py diff --git a/src/gstools/tools/export.py b/src/gstools_cython/tools/export.py similarity index 100% rename from src/gstools/tools/export.py rename to src/gstools_cython/tools/export.py diff --git a/src/gstools/tools/geometric.py b/src/gstools_cython/tools/geometric.py similarity index 100% rename from src/gstools/tools/geometric.py rename to src/gstools_cython/tools/geometric.py diff --git a/src/gstools/tools/misc.py b/src/gstools_cython/tools/misc.py similarity index 100% rename from src/gstools/tools/misc.py rename to src/gstools_cython/tools/misc.py diff --git a/src/gstools/tools/special.py b/src/gstools_cython/tools/special.py similarity index 100% rename from src/gstools/tools/special.py rename to src/gstools_cython/tools/special.py diff --git a/src/gstools/transform/__init__.py b/src/gstools_cython/transform/__init__.py similarity index 100% rename from src/gstools/transform/__init__.py rename to src/gstools_cython/transform/__init__.py diff --git a/src/gstools/transform/array.py b/src/gstools_cython/transform/array.py similarity index 100% rename from src/gstools/transform/array.py rename to src/gstools_cython/transform/array.py diff --git a/src/gstools/transform/field.py b/src/gstools_cython/transform/field.py similarity index 100% rename from src/gstools/transform/field.py rename to src/gstools_cython/transform/field.py diff --git a/src/gstools/variogram/__init__.py b/src/gstools_cython/variogram/__init__.py similarity index 100% rename from src/gstools/variogram/__init__.py rename to src/gstools_cython/variogram/__init__.py diff --git a/src/gstools/variogram/binning.py b/src/gstools_cython/variogram/binning.py similarity index 100% rename from src/gstools/variogram/binning.py rename to src/gstools_cython/variogram/binning.py diff --git a/src/gstools/variogram/estimator.pyx b/src/gstools_cython/variogram/estimator.pyx similarity index 100% rename from src/gstools/variogram/estimator.pyx rename to src/gstools_cython/variogram/estimator.pyx diff --git a/src/gstools/variogram/variogram.py b/src/gstools_cython/variogram/variogram.py similarity index 100% rename from src/gstools/variogram/variogram.py rename to src/gstools_cython/variogram/variogram.py From 34ad0a037d03e0d5872020d59a7164b79d66e98b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Thu, 11 Jul 2024 23:34:36 +0200 Subject: [PATCH 02/26] move cython files to top level --- src/gstools_cython/{variogram => }/estimator.pyx | 0 src/gstools_cython/{krige => }/krigesum.pyx | 0 src/gstools_cython/{field => }/summator.pyx | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename src/gstools_cython/{variogram => }/estimator.pyx (100%) rename src/gstools_cython/{krige => }/krigesum.pyx (100%) rename src/gstools_cython/{field => }/summator.pyx (100%) diff --git a/src/gstools_cython/variogram/estimator.pyx b/src/gstools_cython/estimator.pyx similarity index 100% rename from src/gstools_cython/variogram/estimator.pyx rename to src/gstools_cython/estimator.pyx diff --git a/src/gstools_cython/krige/krigesum.pyx b/src/gstools_cython/krigesum.pyx similarity index 100% rename 
from src/gstools_cython/krige/krigesum.pyx rename to src/gstools_cython/krigesum.pyx diff --git a/src/gstools_cython/field/summator.pyx b/src/gstools_cython/summator.pyx similarity index 100% rename from src/gstools_cython/field/summator.pyx rename to src/gstools_cython/summator.pyx From 3ed55ee2d66a54cea5492248198b040d35243b93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Thu, 11 Jul 2024 23:35:37 +0200 Subject: [PATCH 03/26] remove all non-cython files --- src/gstools_cython/config.py | 16 - src/gstools_cython/covmodel/__init__.py | 98 -- src/gstools_cython/covmodel/base.py | 1210 --------------------- src/gstools_cython/covmodel/fit.py | 540 --------- src/gstools_cython/covmodel/models.py | 960 ---------------- src/gstools_cython/covmodel/plot.py | 288 ----- src/gstools_cython/covmodel/tools.py | 647 ----------- src/gstools_cython/covmodel/tpl_models.py | 570 ---------- src/gstools_cython/field/__init__.py | 37 - src/gstools_cython/field/base.py | 705 ------------ src/gstools_cython/field/cond_srf.py | 313 ------ src/gstools_cython/field/generator.py | 534 --------- src/gstools_cython/field/plot.py | 402 ------- src/gstools_cython/field/srf.py | 218 ---- src/gstools_cython/field/tools.py | 257 ----- src/gstools_cython/field/upscaling.py | 98 -- src/gstools_cython/krige/__init__.py | 29 - src/gstools_cython/krige/base.py | 729 ------------- src/gstools_cython/krige/methods.py | 520 --------- src/gstools_cython/krige/tools.py | 96 -- src/gstools_cython/normalizer/__init__.py | 61 -- src/gstools_cython/normalizer/base.py | 260 ----- src/gstools_cython/normalizer/methods.py | 363 ------- src/gstools_cython/normalizer/tools.py | 186 ---- src/gstools_cython/random/__init__.py | 36 - src/gstools_cython/random/rng.py | 221 ---- src/gstools_cython/random/tools.py | 183 ---- src/gstools_cython/tools/__init__.py | 159 --- src/gstools_cython/tools/export.py | 236 ---- src/gstools_cython/tools/geometric.py | 754 ------------- src/gstools_cython/tools/misc.py | 143 --- src/gstools_cython/tools/special.py | 257 ----- src/gstools_cython/transform/__init__.py | 93 -- src/gstools_cython/transform/array.py | 360 ------ src/gstools_cython/transform/field.py | 725 ------------ src/gstools_cython/variogram/__init__.py | 40 - src/gstools_cython/variogram/binning.py | 104 -- src/gstools_cython/variogram/variogram.py | 499 --------- 38 files changed, 12947 deletions(-) delete mode 100644 src/gstools_cython/config.py delete mode 100644 src/gstools_cython/covmodel/__init__.py delete mode 100644 src/gstools_cython/covmodel/base.py delete mode 100755 src/gstools_cython/covmodel/fit.py delete mode 100644 src/gstools_cython/covmodel/models.py delete mode 100644 src/gstools_cython/covmodel/plot.py delete mode 100644 src/gstools_cython/covmodel/tools.py delete mode 100644 src/gstools_cython/covmodel/tpl_models.py delete mode 100644 src/gstools_cython/field/__init__.py delete mode 100755 src/gstools_cython/field/base.py delete mode 100644 src/gstools_cython/field/cond_srf.py delete mode 100644 src/gstools_cython/field/generator.py delete mode 100644 src/gstools_cython/field/plot.py delete mode 100644 src/gstools_cython/field/srf.py delete mode 100644 src/gstools_cython/field/tools.py delete mode 100644 src/gstools_cython/field/upscaling.py delete mode 100644 src/gstools_cython/krige/__init__.py delete mode 100755 src/gstools_cython/krige/base.py delete mode 100644 src/gstools_cython/krige/methods.py delete mode 100644 src/gstools_cython/krige/tools.py delete mode 100644 
src/gstools_cython/normalizer/__init__.py delete mode 100644 src/gstools_cython/normalizer/base.py delete mode 100644 src/gstools_cython/normalizer/methods.py delete mode 100644 src/gstools_cython/normalizer/tools.py delete mode 100644 src/gstools_cython/random/__init__.py delete mode 100644 src/gstools_cython/random/rng.py delete mode 100644 src/gstools_cython/random/tools.py delete mode 100644 src/gstools_cython/tools/__init__.py delete mode 100644 src/gstools_cython/tools/export.py delete mode 100644 src/gstools_cython/tools/geometric.py delete mode 100755 src/gstools_cython/tools/misc.py delete mode 100644 src/gstools_cython/tools/special.py delete mode 100644 src/gstools_cython/transform/__init__.py delete mode 100644 src/gstools_cython/transform/array.py delete mode 100644 src/gstools_cython/transform/field.py delete mode 100644 src/gstools_cython/variogram/__init__.py delete mode 100644 src/gstools_cython/variogram/binning.py delete mode 100644 src/gstools_cython/variogram/variogram.py diff --git a/src/gstools_cython/config.py b/src/gstools_cython/config.py deleted file mode 100644 index 24ce20c7..00000000 --- a/src/gstools_cython/config.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -GStools subpackage providing global variables. - -.. currentmodule:: gstools.config - -""" - -NUM_THREADS = None - -# pylint: disable=W0611 -try: # pragma: no cover - import gstools_core - - USE_RUST = True -except ImportError: - USE_RUST = False diff --git a/src/gstools_cython/covmodel/__init__.py b/src/gstools_cython/covmodel/__init__.py deleted file mode 100644 index 28ab81f2..00000000 --- a/src/gstools_cython/covmodel/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -GStools subpackage providing a set of handy covariance models. - -.. currentmodule:: gstools.covmodel - -Subpackages -^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - plot - -Covariance Base-Class -^^^^^^^^^^^^^^^^^^^^^ -Class to construct user defined covariance models - -.. autosummary:: - :toctree: - - CovModel - -Covariance Models -^^^^^^^^^^^^^^^^^ -Standard Covariance Models - -.. autosummary:: - :toctree: - - Gaussian - Exponential - Matern - Integral - Stable - Rational - Cubic - Linear - Circular - Spherical - HyperSpherical - SuperSpherical - JBessel - -Truncated Power Law Covariance Models -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - TPLGaussian - TPLExponential - TPLStable - TPLSimple -""" - -from gstools.covmodel.base import CovModel -from gstools.covmodel.models import ( - Circular, - Cubic, - Exponential, - Gaussian, - HyperSpherical, - Integral, - JBessel, - Linear, - Matern, - Rational, - Spherical, - Stable, - SuperSpherical, -) -from gstools.covmodel.tpl_models import ( - TPLExponential, - TPLGaussian, - TPLSimple, - TPLStable, -) - -__all__ = [ - "CovModel", - "Gaussian", - "Exponential", - "Matern", - "Integral", - "Stable", - "Rational", - "Cubic", - "Linear", - "Circular", - "Spherical", - "HyperSpherical", - "SuperSpherical", - "JBessel", - "TPLGaussian", - "TPLExponential", - "TPLStable", - "TPLSimple", -] diff --git a/src/gstools_cython/covmodel/base.py b/src/gstools_cython/covmodel/base.py deleted file mode 100644 index 23e19881..00000000 --- a/src/gstools_cython/covmodel/base.py +++ /dev/null @@ -1,1210 +0,0 @@ -""" -GStools subpackage providing the base class for covariance models. - -.. currentmodule:: gstools.covmodel.base - -The following classes are provided - -.. 
autosummary:: - CovModel -""" - -# pylint: disable=C0103, R0201, E1101, C0302, W0613 -import copy - -import numpy as np -from hankel import SymmetricFourierTransform as SFT -from scipy.integrate import quad as integral - -from gstools.covmodel import plot -from gstools.covmodel.fit import fit_variogram -from gstools.covmodel.tools import ( - _init_subclass, - check_arg_bounds, - check_bounds, - compare, - default_arg_from_bounds, - model_repr, - percentile_scale, - set_arg_bounds, - set_dim, - set_len_anis, - set_model_angles, - set_opt_args, - spectral_rad_pdf, -) -from gstools.tools import RADIAN_SCALE -from gstools.tools.geometric import ( - great_circle_to_chordal, - latlon2pos, - matrix_anisometrize, - matrix_isometrize, - pos2latlon, - rotated_main_axes, -) - -__all__ = ["CovModel"] - -# default arguments for hankel.SymmetricFourierTransform -HANKEL_DEFAULT = {"a": -1, "b": 1, "N": 200, "h": 0.001, "alt": True} - - -class CovModel: - r"""Base class for the GSTools covariance models. - - Parameters - ---------- - dim : :class:`int`, optional - dimension of the model. - Includes the temporal dimension if temporal is true. - To specify only the spatial dimension in that case, use `spatial_dim`. - Default: ``3`` - var : :class:`float`, optional - variance of the model (the nugget is not included in "this" variance) - Default: ``1.0`` - len_scale : :class:`float` or :class:`list`, optional - length scale of the model. - If a single value is given, the same length-scale will be used for - every direction. If multiple values (for main and transversal - directions) are given, `anis` will be - recalculated accordingly. If only two values are given in 3D, - the latter one will be used for both transversal directions. - Default: ``1.0`` - nugget : :class:`float`, optional - nugget of the model. Default: ``0.0`` - anis : :class:`float` or :class:`list`, optional - anisotropy ratios in the transversal directions [e_y, e_z]. - - * e_y = l_y / l_x - * e_z = l_z / l_x - - If only one value is given in 3D, e_y will be set to 1. - This value will be ignored, if multiple len_scales are given. - Default: ``1.0`` - angles : :class:`float` or :class:`list`, optional - angles of rotation (given in rad): - - * in 2D: given as rotation around z-axis - * in 3D: given by yaw, pitch, and roll (known as Tait–Bryan angles) - - Default: ``0.0`` - integral_scale : :class:`float` or :class:`list` or :any:`None`, optional - If given, ``len_scale`` will be ignored and recalculated, - so that the integral scale of the model matches the given one. - Default: :any:`None` - rescale : :class:`float` or :any:`None`, optional - Optional rescaling factor to divide the length scale with. - This could be used for unit conversion or rescaling the length scale - to coincide with e.g. the integral scale. - Will be set by each model individually. - Default: :any:`None` - latlon : :class:`bool`, optional - Whether the model is describing 2D fields on earths surface described - by latitude and longitude. When using this, the model will internally - use the associated 'Yadrenko' model to represent a valid model. - This means, the spatial distance :math:`r` will be replaced by - :math:`2\sin(\alpha/2)`, where :math:`\alpha` is the great-circle - distance, which is equal to the spatial distance of two points in 3D. - As a consequence, `dim` will be set to `3` and anisotropy will be - disabled. `geo_scale` can be set to e.g. earth's radius, - to have a meaningful `len_scale` parameter. 
- Default: False - geo_scale : :class:`float`, optional - Geographic unit scaling in case of latlon coordinates to get a - meaningful length scale unit. - By default, len_scale is assumed to be in radians with latlon=True. - Can be set to :any:`KM_SCALE` to have len_scale in km or - :any:`DEGREE_SCALE` to have len_scale in degrees. - Default: :any:`RADIAN_SCALE` - temporal : :class:`bool`, optional - Create a metric spatio-temporal covariance model. - Setting this to true will increase `dim` and `field_dim` by 1. - `spatial_dim` will be `field_dim - 1`. - The time-dimension is appended, meaning the pos tuple is (x,y,z,...,t). - Default: False - spatial_dim : :class:`int`, optional - spatial dimension of the model. - If given, the model dimension will be determined from this spatial dimension - and the possible temporal dimension if temporal is ture. - Default: None - var_raw : :class:`float` or :any:`None`, optional - raw variance of the model which will be multiplied with - :any:`CovModel.var_factor` to result in the actual variance. - If given, ``var`` will be ignored. - (This is just for models that override :any:`CovModel.var_factor`) - Default: :any:`None` - hankel_kw: :class:`dict` or :any:`None`, optional - Modify the init-arguments of - :any:`hankel.SymmetricFourierTransform` - used for the spectrum calculation. Use with caution (Better: Don't!). - ``None`` is equivalent to ``{"a": -1, "b": 1, "N": 1000, "h": 0.001}``. - Default: :any:`None` - **opt_arg - Optional arguments are covered by these keyword arguments. - If present, they are described in the section `Other Parameters`. - """ - - def __init__( - self, - dim=3, - var=1.0, - len_scale=1.0, - nugget=0.0, - anis=1.0, - angles=0.0, - *, - integral_scale=None, - rescale=None, - latlon=False, - geo_scale=RADIAN_SCALE, - temporal=False, - spatial_dim=None, - var_raw=None, - hankel_kw=None, - **opt_arg, - ): - # assert, that we use a subclass - # this is the case, if __init_subclass__ is called, which creates - # the "variogram"... 
so we check for that - if not hasattr(self, "variogram"): - raise TypeError("Don't instantiate 'CovModel' directly!") - - # prepare dim setting - self._dim = None - self._hankel_kw = None - self._sft = None - # prepare parameters (they are checked in dim setting) - self._rescale = None - self._len_scale = None - self._anis = None - self._angles = None - # prepare parameters boundaries - self._var_bounds = None - self._len_scale_bounds = None - self._nugget_bounds = None - self._anis_bounds = None - self._opt_arg_bounds = {} - # Set latlon and temporal first - self._latlon = bool(latlon) - self._temporal = bool(temporal) - self._geo_scale = abs(float(geo_scale)) - # SFT class will be created within dim.setter but needs hankel_kw - self.hankel_kw = hankel_kw - # using time increases model dimension given by "spatial_dim" - self.dim = ( - dim if spatial_dim is None else spatial_dim + int(self.temporal) - ) - - # optional arguments for the variogram-model - set_opt_args(self, opt_arg) - - # set standard boundaries for variance, len_scale, nugget and opt_arg - bounds = self.default_arg_bounds() - bounds.update(self.default_opt_arg_bounds()) - self.set_arg_bounds(check_args=False, **bounds) - - # set parameters - self.rescale = rescale - self._nugget = float(nugget) - - # set anisotropy and len_scale, disable anisotropy for latlon models - self._len_scale, self._anis = set_len_anis( - self.dim, len_scale, anis, self.latlon - ) - self._angles = set_model_angles( - self.dim, angles, self.latlon, self.temporal - ) - - # set var at last, because of the var_factor (to be right initialized) - if var_raw is None: - self._var = None - self.var = var - else: - self._var = float(var_raw) - self._integral_scale = None - self.integral_scale = integral_scale - # set var again, if int_scale affects var_factor - if var_raw is None: - self._var = None - self.var = var - else: - self._var = float(var_raw) - # final check for parameter bounds - self.check_arg_bounds() - # additional checks for the optional arguments (provided by user) - self.check_opt_arg() - # precision for printing - self._prec = 3 - - # one of these functions needs to be overridden - def __init_subclass__(cls): - """Initialize gstools covariance model.""" - _init_subclass(cls) - - # modify the docstrings: class docstring gets attributes added - if cls.__doc__ is None: - cls.__doc__ = "User defined GSTools Covariance-Model." 
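[Editor's note, not part of the patch: the check above enforces that `CovModel` is only used through subclasses, which `__init_subclass__` wires up. As a hedged illustration of the user-defined-model workflow documented in the deleted covmodel module (assuming the pre-rename `gstools` package is installed), a minimal subclass only needs a normalized correlation function:]

```python
import numpy as np
import gstools as gs


class MyGaussian(gs.CovModel):
    """Minimal user-defined model: only the normalized correlation is given."""

    def cor(self, h):
        # h is the dimensionless lag r / len_rescaled, so cor(0) == 1
        return np.exp(-h**2)


model = MyGaussian(dim=2, var=1.5, len_scale=10.0)
print(model)  # variance, length scale, nugget, ... are handled by the base class
```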
- cls.__doc__ += CovModel.__doc__[45:] - # overridden functions get standard doc if no new doc was created - ign = ["__", "variogram", "covariance", "cor"] - for att, attr_cls in cls.__dict__.items(): - if any(att.startswith(i) for i in ign) or att not in dir(CovModel): - continue - attr_doc = getattr(CovModel, att).__doc__ - if attr_cls.__doc__ is None: - attr_cls.__doc__ = attr_doc - - # special variogram functions - - def vario_axis(self, r, axis=0): - r"""Variogram along axis of anisotropy.""" - if axis == 0: - return self.variogram(r) - return self.variogram(np.abs(r) / self.anis[axis - 1]) - - def cov_axis(self, r, axis=0): - r"""Covariance along axis of anisotropy.""" - if axis == 0: - return self.covariance(r) - return self.covariance(np.abs(r) / self.anis[axis - 1]) - - def cor_axis(self, r, axis=0): - r"""Correlation along axis of anisotropy.""" - if axis == 0: - return self.correlation(r) - return self.correlation(np.abs(r) / self.anis[axis - 1]) - - def vario_yadrenko(self, zeta): - r"""Yadrenko variogram for great-circle distance from latlon-pos.""" - return self.variogram(great_circle_to_chordal(zeta, self.geo_scale)) - - def cov_yadrenko(self, zeta): - r"""Yadrenko covariance for great-circle distance from latlon-pos.""" - return self.covariance(great_circle_to_chordal(zeta, self.geo_scale)) - - def cor_yadrenko(self, zeta): - r"""Yadrenko correlation for great-circle distance from latlon-pos.""" - return self.correlation(great_circle_to_chordal(zeta, self.geo_scale)) - - def vario_spatial(self, pos): - r"""Spatial variogram respecting anisotropy and rotation.""" - return self.variogram(self._get_iso_rad(pos)) - - def cov_spatial(self, pos): - r"""Spatial covariance respecting anisotropy and rotation.""" - return self.covariance(self._get_iso_rad(pos)) - - def cor_spatial(self, pos): - r"""Spatial correlation respecting anisotropy and rotation.""" - return self.correlation(self._get_iso_rad(pos)) - - def vario_nugget(self, r): - """Isotropic variogram of the model respecting the nugget at r=0.""" - r = np.asarray(np.abs(r), dtype=np.double) - r_gz = np.logical_not(np.isclose(r, 0)) - res = np.empty_like(r, dtype=np.double) - res[r_gz] = self.variogram(r[r_gz]) - res[np.logical_not(r_gz)] = 0.0 - return res - - def cov_nugget(self, r): - """Isotropic covariance of the model respecting the nugget at r=0.""" - r = np.asarray(np.abs(r), dtype=np.double) - r_gz = np.logical_not(np.isclose(r, 0)) - res = np.empty_like(r, dtype=np.double) - res[r_gz] = self.covariance(r[r_gz]) - res[np.logical_not(r_gz)] = self.sill - return res - - def plot(self, func="variogram", **kwargs): # pragma: no cover - """ - Plot a function of a the CovModel. - - Parameters - ---------- - func : :class:`str`, optional - Function to be plotted. Could be one of: - - * "variogram" - * "covariance" - * "correlation" - * "vario_spatial" - * "cov_spatial" - * "cor_spatial" - * "vario_yadrenko" - * "cov_yadrenko" - * "cor_yadrenko" - * "vario_axis" - * "cov_axis" - * "cor_axis" - * "spectrum" - * "spectral_density" - * "spectral_rad_pdf" - - **kwargs - Keyword arguments forwarded to the plotting function - `"plot_" + func` in :py:mod:`gstools.covmodel.plot`. 
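[Editor's note, not part of the patch: a short, hedged usage sketch for the plotting helper documented above, assuming matplotlib and the pre-rename `gstools` package are available:]

```python
import gstools as gs

model = gs.Exponential(dim=2, var=2.0, len_scale=8.0, nugget=0.1)
# any of the function names listed above can be passed as `func`
ax1 = model.plot("variogram")
ax2 = model.plot("spectral_density")
```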
- - See Also - -------- - gstools.covmodel.plot - """ - routine = getattr(plot, "plot_" + func) - return routine(self, **kwargs) - - # pykrige functions - - def pykrige_vario(self, args=None, r=0): # pragma: no cover - """Isotropic variogram of the model for pykrige.""" - if self.latlon: - return self.vario_yadrenko(np.deg2rad(r)) - return self.variogram(r) - - @property - def pykrige_anis(self): - """2D anisotropy ratio for pykrige.""" - if self.dim == 2: - return 1 / self.anis[0] - return 1.0 # pragma: no cover - - @property - def pykrige_anis_y(self): - """3D anisotropy ratio in y direction for pykrige.""" - if self.dim >= 2: - return 1 / self.anis[0] - return 1.0 # pragma: no cover - - @property - def pykrige_anis_z(self): - """3D anisotropy ratio in z direction for pykrige.""" - if self.dim == 3: - return 1 / self.anis[1] - return 1.0 # pragma: no cover - - @property - def pykrige_angle(self): - """2D rotation angle for pykrige.""" - if self.dim == 2: - return self.angles[0] / np.pi * 180 - return 0.0 # pragma: no cover - - @property - def pykrige_angle_z(self): - """3D rotation angle around z for pykrige.""" - if self.dim >= 2: - return self.angles[0] / np.pi * 180 - return 0.0 # pragma: no cover - - @property - def pykrige_angle_y(self): - """3D rotation angle around y for pykrige.""" - if self.dim == 3: - return self.angles[1] / np.pi * 180 - return 0.0 # pragma: no cover - - @property - def pykrige_angle_x(self): - """3D rotation angle around x for pykrige.""" - if self.dim == 3: - return self.angles[2] / np.pi * 180 - return 0.0 # pragma: no cover - - @property - def pykrige_kwargs(self): - """Keyword arguments for pykrige routines.""" - kwargs = { - "variogram_model": "custom", - "variogram_parameters": [], - "variogram_function": self.pykrige_vario, - } - if self.dim == 1: - add_kwargs = {} - elif self.dim == 2: - add_kwargs = { - "anisotropy_scaling": self.pykrige_anis, - "anisotropy_angle": self.pykrige_angle, - } - else: - add_kwargs = { - "anisotropy_scaling_y": self.pykrige_anis_y, - "anisotropy_scaling_z": self.pykrige_anis_z, - "anisotropy_angle_x": self.pykrige_angle_x, - "anisotropy_angle_y": self.pykrige_angle_y, - "anisotropy_angle_z": self.pykrige_angle_z, - } - kwargs.update(add_kwargs) - return kwargs - - # methods for optional/default arguments (can be overridden) - - def default_opt_arg(self): - """Provide default optional arguments by the user. - - Should be given as a dictionary when overridden. - """ - return { - opt: default_arg_from_bounds(bnd) - for (opt, bnd) in self.default_opt_arg_bounds().items() - } - - def default_opt_arg_bounds(self): - """Provide default boundaries for optional arguments.""" - res = {} - for opt in self.opt_arg: - res[opt] = [-np.inf, np.inf] - return res - - def check_opt_arg(self): - """Run checks for the optional arguments. 
- - This is in addition to the bound-checks - - Notes - ----- - * You can use this to raise a ValueError/warning - * Any return value will be ignored - * This method will only be run once, when the class is initialized - """ - - def check_dim(self, dim): - """Check the given dimension.""" - return True - - def fix_dim(self): - """Set a fix dimension for the model.""" - return None - - def var_factor(self): - """Factor for the variance.""" - return 1.0 - - def default_rescale(self): - """Provide default rescaling factor.""" - return 1.0 - - # calculation of different scales - - def calc_integral_scale(self): - """Calculate the integral scale of the isotrope model.""" - self._integral_scale = integral(self.correlation, 0, np.inf)[0] - return self._integral_scale - - def percentile_scale(self, per=0.9): - """Calculate the percentile scale of the isotrope model. - - This is the distance, where the given percentile of the variance - is reached by the variogram - """ - return percentile_scale(self, per) - - # spectrum methods (can be overridden for speedup) - - def spectrum(self, k): - r""" - Spectrum of the covariance model. - - This is given by: - - .. math:: S(\mathbf{k}) = \left(\frac{1}{2\pi}\right)^n - \int C(r) e^{i \mathbf{k}\cdot\mathbf{r}} d^n\mathbf{r} - - Internally, this is calculated by the hankel transformation: - - .. math:: S(k) = \left(\frac{1}{2\pi}\right)^n \cdot - \frac{(2\pi)^{n/2}}{k^{n/2-1}} - \int_0^\infty r^{n/2} C(r) J_{n/2-1}(kr) dr - - Where :math:`C(r)` is the covariance function of the model. - - Parameters - ---------- - k : :class:`float` - Radius of the phase: :math:`k=\left\Vert\mathbf{k}\right\Vert` - """ - return self.spectral_density(k) * self.var - - def spectral_density(self, k): - r""" - Spectral density of the covariance model. - - This is given by: - - .. math:: \tilde{S}(k) = \frac{S(k)}{\sigma^2} - - Where :math:`S(k)` is the spectrum of the covariance model. 
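[Editor's note, not part of the patch: a quick numerical check of the relation stated above, S(k) = sigma^2 * S~(k), assuming the pre-rename `gstools` package is installed:]

```python
import numpy as np
import gstools as gs

model = gs.Gaussian(dim=3, var=2.0, len_scale=10.0)
k = np.linspace(0.01, 1.0, 50)
# the spectrum is the spectral density scaled by the variance
assert np.allclose(model.spectrum(k), model.var * model.spectral_density(k))
```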
- - Parameters - ---------- - k : :class:`float` - Radius of the phase: :math:`k=\left\Vert\mathbf{k}\right\Vert` - """ - k = np.asarray(np.abs(k), dtype=np.double) - return self._sft.transform(self.correlation, k, ret_err=False) - - def spectral_rad_pdf(self, r): - """Radial spectral density of the model.""" - return spectral_rad_pdf(self, r) - - def ln_spectral_rad_pdf(self, r): - """Log radial spectral density of the model.""" - with np.errstate(divide="ignore"): - return np.log(self.spectral_rad_pdf(r)) - - def _has_cdf(self): - """State if a cdf is defined with 'spectral_rad_cdf'.""" - return hasattr(self, "spectral_rad_cdf") - - def _has_ppf(self): - """State if a ppf is defined with 'spectral_rad_ppf'.""" - return hasattr(self, "spectral_rad_ppf") - - # spatial routines - - def isometrize(self, pos): - """Make a position tuple ready for isotropic operations.""" - pos = np.asarray(pos, dtype=np.double).reshape((self.field_dim, -1)) - if self.latlon: - return latlon2pos( - pos, - radius=self.geo_scale, - temporal=self.temporal, - time_scale=self.anis[-1], - ) - return np.dot(matrix_isometrize(self.dim, self.angles, self.anis), pos) - - def anisometrize(self, pos): - """Bring a position tuple into the anisotropic coordinate-system.""" - pos = np.asarray(pos, dtype=np.double).reshape((self.dim, -1)) - if self.latlon: - return pos2latlon( - pos, - radius=self.geo_scale, - temporal=self.temporal, - time_scale=self.anis[-1], - ) - return np.dot( - matrix_anisometrize(self.dim, self.angles, self.anis), pos - ) - - def main_axes(self): - """Axes of the rotated coordinate-system.""" - return rotated_main_axes(self.dim, self.angles) - - def _get_iso_rad(self, pos): - """Isometrized radians.""" - pos = np.asarray(pos, dtype=np.double).reshape((self.dim, -1)) - iso = np.dot(matrix_isometrize(self.dim, self.angles, self.anis), pos) - return np.linalg.norm(iso, axis=0) - - # fitting routine - - def fit_variogram( - self, - x_data, - y_data, - anis=True, - sill=None, - init_guess="default", - weights=None, - method="trf", - loss="soft_l1", - max_eval=None, - return_r2=False, - curve_fit_kwargs=None, - **para_select, - ): - """ - Fitting the variogram-model to an empirical variogram. - - Parameters - ---------- - x_data : :class:`numpy.ndarray` - The bin-centers of the empirical variogram. - y_data : :class:`numpy.ndarray` - The measured variogram - If multiple are given, they are interpreted as the directional - variograms along the main axis of the associated rotated - coordinate system. - Anisotropy ratios will be estimated in that case. - anis : :class:`bool`, optional - In case of a directional variogram, you can control anisotropy - by this argument. Deselect the parameter from fitting, by setting - it "False". - You could also pass a fixed value to be set in the model. - Then the anisotropy ratios wont be altered during fitting. - Default: True - sill : :class:`float` or :class:`bool`, optional - Here you can provide a fixed sill for the variogram. - It needs to be in a fitting range for the var and nugget bounds. - If variance or nugget are not selected for estimation, - the nugget will be recalculated to fulfill: - - * sill = var + nugget - * if the variance is bigger than the sill, - nugget will bet set to its lower bound - and the variance will be set to the fitting partial sill. - - If variance is deselected, it needs to be less than the sill, - otherwise a ValueError comes up. Same for nugget. 
- If sill=False, it will be deselected from estimation - and set to the current sill of the model. - Then, the procedure above is applied. - Default: None - init_guess : :class:`str` or :class:`dict`, optional - Initial guess for the estimation. Either: - - * "default": using the default values of the covariance model - ("len_scale" will be mean of given bin centers; - "var" and "nugget" will be mean of given variogram values - (if in given bounds)) - * "current": using the current values of the covariance model - * dict: dictionary with parameter names and given value - (separate "default" can bet set to "default" or "current" for - unspecified values to get same behavior as given above - ("default" by default)) - Example: ``{"len_scale": 10, "default": "current"}`` - - Default: "default" - weights : :class:`str`, :class:`numpy.ndarray`, :class:`callable`, optional - Weights applied to each point in the estimation. Either: - - * 'inv': inverse distance ``1 / (x_data + 1)`` - * list: weights given per bin - * callable: function applied to x_data - - If callable, it must take a 1-d ndarray. - Then ``weights = f(x_data)``. - Default: None - method : {'trf', 'dogbox'}, optional - Algorithm to perform minimization. - - * 'trf' : Trust Region Reflective algorithm, - particularly suitable for large sparse problems with bounds. - Generally robust method. - * 'dogbox' : dogleg algorithm with rectangular trust regions, - typical use case is small problems with bounds. - Not recommended for problems with rank-deficient Jacobian. - - Default: 'trf' - loss : :class:`str` or :class:`callable`, optional - Determines the loss function in scipys curve_fit. - The following keyword values are allowed: - - * 'linear' (default) : ``rho(z) = z``. Gives a standard - least-squares problem. - * 'soft_l1' : ``rho(z) = 2 * ((1 + z)**0.5 - 1)``. The smooth - approximation of l1 (absolute value) loss. Usually a good - choice for robust least squares. - * 'huber' : ``rho(z) = z if z <= 1 else 2*z**0.5 - 1``. Works - similarly to 'soft_l1'. - * 'cauchy' : ``rho(z) = ln(1 + z)``. Severely weakens outliers - influence, but may cause difficulties in optimization process. - * 'arctan' : ``rho(z) = arctan(z)``. Limits a maximum loss on - a single residual, has properties similar to 'cauchy'. - - If callable, it must take a 1-d ndarray ``z=f**2`` and return an - array_like with shape (3, m) where row 0 contains function values, - row 1 contains first derivatives and row 2 contains second - derivatives. Default: 'soft_l1' - max_eval : :class:`int` or :any:`None`, optional - Maximum number of function evaluations before the termination. - If None (default), the value is chosen automatically: 100 * n. - return_r2 : :class:`bool`, optional - Whether to return the r2 score of the estimation. - Default: False - curve_fit_kwargs : :class:`dict`, optional - Other keyword arguments passed to scipys curve_fit. Default: None - **para_select - You can deselect parameters from fitting, by setting - them "False" using their names as keywords. - You could also pass fixed values for each parameter. - Then these values will be applied and the involved parameters wont - be fitted. - By default, all parameters are fitted. - - Returns - ------- - fit_para : :class:`dict` - Dictionary with the fitted parameter values - pcov : :class:`numpy.ndarray` - The estimated covariance of `popt` from - :any:`scipy.optimize.curve_fit`. - To compute one standard deviation errors - on the parameters use ``perr = np.sqrt(np.diag(pcov))``. 
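[Editor's note, not part of the patch: a hedged sketch of calling the fitting routine documented above; `bin_center` and `gamma` stand in for a real empirical variogram and are synthesized here purely for illustration:]

```python
import numpy as np
import gstools as gs

bin_center = np.linspace(1.0, 50.0, 25)      # assumed bin centers
gamma = 1.0 - np.exp(-bin_center / 10.0)     # stand-in "empirical" variogram values
model = gs.Exponential(dim=2)
# nugget=False deselects the nugget from fitting, as described in **para_select
fit_para, pcov = model.fit_variogram(bin_center, gamma, nugget=False)
perr = np.sqrt(np.diag(pcov))                # one standard deviation errors, as noted above
print(fit_para, perr)
```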
- r2_score : :class:`float`, optional - r2 score of the curve fitting results. Only if return_r2 is True. - - Notes - ----- - You can set the bounds for each parameter by accessing - :any:`CovModel.set_arg_bounds`. - - The fitted parameters will be instantly set in the model. - """ - return fit_variogram( - model=self, - x_data=x_data, - y_data=y_data, - anis=anis, - sill=sill, - init_guess=init_guess, - weights=weights, - method=method, - loss=loss, - max_eval=max_eval, - return_r2=return_r2, - curve_fit_kwargs=curve_fit_kwargs, - **para_select, - ) - - # bounds setting and checks - - def default_arg_bounds(self): - """Provide default boundaries for arguments. - - Given as a dictionary. - """ - res = { - "var": (0.0, np.inf, "oo"), - "len_scale": (0.0, np.inf, "oo"), - "nugget": (0.0, np.inf, "co"), - "anis": (0.0, np.inf, "oo"), - } - return res - - def set_arg_bounds(self, check_args=True, **kwargs): - r"""Set bounds for the parameters of the model. - - Parameters - ---------- - check_args : bool, optional - Whether to check if the arguments are in their valid bounds. - In case not, a proper default value will be determined. - Default: True - **kwargs - Parameter name as keyword ("var", "len_scale", "nugget", ) - and a list of 2 or 3 values: ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - return set_arg_bounds(self, check_args, **kwargs) - - def check_arg_bounds(self): - """Check arguments to be within their given bounds.""" - return check_arg_bounds(self) - - # bounds properties - - @property - def var_bounds(self): - """:class:`list`: Bounds for the variance. - - Notes - ----- - Is a list of 2 or 3 values: ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - return self._var_bounds - - @var_bounds.setter - def var_bounds(self, bounds): - if not check_bounds(bounds): - raise ValueError( - f"Given bounds for 'var' are not valid, got: {bounds}" - ) - self._var_bounds = bounds - - @property - def len_scale_bounds(self): - """:class:`list`: Bounds for the length scale. - - Notes - ----- - Is a list of 2 or 3 values: ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - return self._len_scale_bounds - - @len_scale_bounds.setter - def len_scale_bounds(self, bounds): - if not check_bounds(bounds): - raise ValueError( - f"Given bounds for 'len_scale' are not valid, got: {bounds}" - ) - self._len_scale_bounds = bounds - - @property - def nugget_bounds(self): - """:class:`list`: Bounds for the nugget. - - Notes - ----- - Is a list of 2 or 3 values: ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - return self._nugget_bounds - - @nugget_bounds.setter - def nugget_bounds(self, bounds): - if not check_bounds(bounds): - raise ValueError( - f"Given bounds for 'nugget' are not valid, got: {bounds}" - ) - self._nugget_bounds = bounds - - @property - def anis_bounds(self): - """:class:`list`: Bounds for the nugget. - - Notes - ----- - Is a list of 2 or 3 values: ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). 
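[Editor's note, not part of the patch: the bounds format described above takes an optional third entry naming the interval type ("oo", "cc", "oc" or "co"); a small, hedged example with the pre-rename `gstools` package:]

```python
import gstools as gs

model = gs.Stable(dim=2)
# [lower, upper] or [lower, upper, bound_type] with bound_type in {"oo", "cc", "oc", "co"}
model.set_arg_bounds(alpha=[0.5, 2.0, "oc"], var=[0.0, 10.0, "oc"])
print(model.arg_bounds["alpha"])
```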
- """ - return self._anis_bounds - - @anis_bounds.setter - def anis_bounds(self, bounds): - if not check_bounds(bounds): - raise ValueError( - f"Given bounds for 'anis' are not valid, got: {bounds}" - ) - self._anis_bounds = bounds - - @property - def opt_arg_bounds(self): - """:class:`dict`: Bounds for the optional arguments. - - Notes - ----- - Keys are the opt-arg names and values are lists of 2 or 3 values: - ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - return self._opt_arg_bounds - - @property - def arg_bounds(self): - """:class:`dict`: Bounds for all parameters. - - Notes - ----- - Keys are the arg names and values are lists of 2 or 3 values: - ``[a, b]`` or ``[a, b, ]`` where - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - res = { - "var": self.var_bounds, - "len_scale": self.len_scale_bounds, - "nugget": self.nugget_bounds, - "anis": self.anis_bounds, - } - res.update(self.opt_arg_bounds) - return res - - @property - def temporal(self): - """:class:`bool`: Whether the model is a metric spatio-temporal one.""" - return self._temporal - - # geographical coordinates related - - @property - def latlon(self): - """:class:`bool`: Whether the model depends on geographical coords.""" - return self._latlon - - @property - def geo_scale(self): - """:class:`float`: Geographic scaling for geographical coords.""" - return self._geo_scale - - @property - def field_dim(self): - """:class:`int`: The (parametric) field dimension of the model (with time).""" - return 2 + int(self.temporal) if self.latlon else self.dim - - @property - def spatial_dim(self): - """:class:`int`: The spatial field dimension of the model (without time).""" - return 2 if self.latlon else self.dim - int(self.temporal) - - # standard parameters - - @property - def dim(self): - """:class:`int`: The dimension of the model.""" - return self._dim - - @dim.setter - def dim(self, dim): - set_dim(self, dim) - - @property - def var(self): - """:class:`float`: The variance of the model.""" - return self._var * self.var_factor() - - @var.setter - def var(self, var): - self._var = float(var) / self.var_factor() - self.check_arg_bounds() - - @property - def var_raw(self): - """:class:`float`: The raw variance of the model without factor. - - (See. 
CovModel.var_factor) - """ - return self._var - - @var_raw.setter - def var_raw(self, var_raw): - self._var = float(var_raw) - self.check_arg_bounds() - - @property - def nugget(self): - """:class:`float`: The nugget of the model.""" - return self._nugget - - @nugget.setter - def nugget(self, nugget): - self._nugget = float(nugget) - self.check_arg_bounds() - - @property - def len_scale(self): - """:class:`float`: The main length scale of the model.""" - return self._len_scale - - @len_scale.setter - def len_scale(self, len_scale): - self._len_scale, anis = set_len_anis( - self.dim, len_scale, self.anis, self.latlon - ) - if self.latlon: - self._anis = np.array((self.dim - 1) * [1], dtype=np.double) - else: - self._anis = anis - self.check_arg_bounds() - - @property - def rescale(self): - """:class:`float`: Rescale factor for the length scale of the model.""" - return self._rescale - - @rescale.setter - def rescale(self, rescale): - rescale = self.default_rescale() if rescale is None else rescale - self._rescale = abs(float(rescale)) - - @property - def len_rescaled(self): - """:class:`float`: The rescaled main length scale of the model.""" - return self._len_scale / self._rescale - - @property - def anis(self): - """:class:`numpy.ndarray`: The anisotropy factors of the model.""" - return self._anis - - @anis.setter - def anis(self, anis): - self._len_scale, self._anis = set_len_anis( - self.dim, self.len_scale, anis, self.latlon - ) - self.check_arg_bounds() - - @property - def angles(self): - """:class:`numpy.ndarray`: Rotation angles (in rad) of the model.""" - return self._angles - - @angles.setter - def angles(self, angles): - self._angles = set_model_angles( - self.dim, angles, self.latlon, self.temporal - ) - self.check_arg_bounds() - - @property - def integral_scale(self): - """:class:`float`: The main integral scale of the model. - - Raises - ------ - ValueError - If integral scale is not setable. - """ - self._integral_scale = self.calc_integral_scale() - return self._integral_scale - - @integral_scale.setter - def integral_scale(self, integral_scale): - if integral_scale is not None: - # format int-scale right - self.len_scale = integral_scale - integral_scale = self.len_scale - # reset len_scale - self.len_scale = 1.0 - int_tmp = self.calc_integral_scale() - self.len_scale = integral_scale / int_tmp - if not np.isclose(self.integral_scale, integral_scale, rtol=1e-3): - raise ValueError( - f"{self.name}: Integral scale could not be set correctly! " - "Please just provide a 'len_scale'!" - ) - - @property - def hankel_kw(self): - """:class:`dict`: :any:`hankel.SymmetricFourierTransform` kwargs.""" - return self._hankel_kw - - @hankel_kw.setter - def hankel_kw(self, hankel_kw): - if self._hankel_kw is None or hankel_kw is None: - self._hankel_kw = copy.copy(HANKEL_DEFAULT) - if hankel_kw is not None: - self._hankel_kw.update(hankel_kw) - if self.dim is not None: - self._sft = SFT(ndim=self.dim, **self.hankel_kw) - - @property - def dist_func(self): - """:class:`tuple` of :any:`callable`: pdf, cdf and ppf. - - Spectral distribution info from the model. 
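[Editor's note, not part of the patch: a hedged illustration of the `dist_func` property documented above; `cdf` and `ppf` are only available when the model defines `spectral_rad_cdf` / `spectral_rad_ppf`, as reported by `has_cdf` / `has_ppf` below:]

```python
import gstools as gs

model = gs.Gaussian(dim=2, var=1.0, len_scale=5.0)
pdf, cdf, ppf = model.dist_func
print(pdf(0.1))                        # radial spectral density at k = 0.1
print(model.has_cdf, model.has_ppf)    # Gaussian defines both, so these should be True
```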
- """ - pdf = self.spectral_rad_pdf - cdf = None - ppf = None - if self.has_cdf: - cdf = self.spectral_rad_cdf - if self.has_ppf: - ppf = self.spectral_rad_ppf - return pdf, cdf, ppf - - @property - def has_cdf(self): - """:class:`bool`: State if a cdf is defined by the user.""" - return self._has_cdf() - - @property - def has_ppf(self): - """:class:`bool`: State if a ppf is defined by the user.""" - return self._has_ppf() - - @property - def sill(self): - """:class:`float`: The sill of the variogram. - - Notes - ----- - This is calculated by: - * ``sill = variance + nugget`` - """ - return self.var + self.nugget - - @property - def arg(self): - """:class:`list` of :class:`str`: Names of all arguments.""" - return ["var", "len_scale", "nugget", "anis", "angles"] + self._opt_arg - - @property - def arg_list(self): - """:class:`list` of :class:`float`: Values of all arguments.""" - alist = [self.var, self.len_scale, self.nugget, self.anis, self.angles] - for opt in self.opt_arg: - alist.append(getattr(self, opt)) - return alist - - @property - def iso_arg(self): - """:class:`list` of :class:`str`: Names of isotropic arguments.""" - return ["var", "len_scale", "nugget"] + self._opt_arg - - @property - def iso_arg_list(self): - """:class:`list` of :class:`float`: Values of isotropic arguments.""" - alist = [self.var, self.len_scale, self.nugget] - for opt in self.opt_arg: - alist.append(getattr(self, opt)) - return alist - - @property - def opt_arg(self): - """:class:`list` of :class:`str`: Names of the optional arguments.""" - return self._opt_arg - - @property - def len_scale_vec(self): - """:class:`numpy.ndarray`: The length scales in each direction. - - Notes - ----- - This is calculated by: - * ``len_scale_vec[0] = len_scale`` - * ``len_scale_vec[1] = len_scale*anis[0]`` - * ``len_scale_vec[2] = len_scale*anis[1]`` - """ - res = np.zeros(self.dim, dtype=np.double) - res[0] = self.len_scale - for i in range(1, self._dim): - res[i] = self.len_scale * self.anis[i - 1] - return res - - @property - def integral_scale_vec(self): - """:class:`numpy.ndarray`: The integral scales in each direction. 
- - Notes - ----- - This is calculated by: - * ``integral_scale_vec[0] = integral_scale`` - * ``integral_scale_vec[1] = integral_scale*anis[0]`` - * ``integral_scale_vec[2] = integral_scale*anis[1]`` - """ - res = np.zeros(self.dim, dtype=np.double) - res[0] = self.integral_scale - for i in range(1, self.dim): - res[i] = self.integral_scale * self.anis[i - 1] - return res - - @property - def name(self): - """:class:`str`: The name of the CovModel class.""" - return self.__class__.__name__ - - @property - def do_rotation(self): - """:any:`bool`: State if a rotation is performed.""" - return not np.all(np.isclose(self.angles, 0.0)) - - @property - def is_isotropic(self): - """:any:`bool`: State if a model is isotropic.""" - return np.all(np.isclose(self.anis, 1.0)) - - def __eq__(self, other): - """Compare CovModels.""" - if not isinstance(other, CovModel): - return False - return compare(self, other) - - def __setattr__(self, name, value): - """Set an attribute.""" - super().__setattr__(name, value) - # if an optional variogram argument was given, check bounds - if hasattr(self, "_opt_arg") and name in self._opt_arg: - self.check_arg_bounds() - - def __repr__(self): - """Return String representation.""" - return model_repr(self) diff --git a/src/gstools_cython/covmodel/fit.py b/src/gstools_cython/covmodel/fit.py deleted file mode 100755 index 8b19f497..00000000 --- a/src/gstools_cython/covmodel/fit.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -GStools subpackage providing tools for the covariance-model. - -.. currentmodule:: gstools.covmodel.fit - -The following classes and functions are provided - -.. autosummary:: - fit_variogram -""" - -# pylint: disable=C0103, W0632 -import numpy as np -from scipy.optimize import curve_fit - -from gstools.covmodel.tools import check_arg_in_bounds, default_arg_from_bounds -from gstools.tools.geometric import great_circle_to_chordal, set_anis - -__all__ = ["fit_variogram"] - - -DEFAULT_PARA = ["var", "len_scale", "nugget"] - - -def fit_variogram( - model, - x_data, - y_data, - anis=True, - sill=None, - init_guess="default", - weights=None, - method="trf", - loss="soft_l1", - max_eval=None, - return_r2=False, - curve_fit_kwargs=None, - **para_select, -): - """ - Fitting a variogram-model to an empirical variogram. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model to fit. - x_data : :class:`numpy.ndarray` - The bin-centers of the empirical variogram. - y_data : :class:`numpy.ndarray` - The measured variogram - If multiple are given, they are interpreted as the directional - variograms along the main axis of the associated rotated - coordinate system. - Anisotropy ratios will be estimated in that case. - anis : :class:`bool`, optional - In case of a directional variogram, you can control anisotropy - by this argument. Deselect the parameter from fitting, by setting - it "False". - You could also pass a fixed value to be set in the model. - Then the anisotropy ratios won't be altered during fitting. - Default: True - sill : :class:`float` or :class:`bool` or :any:`None`, optional - Here you can provide a fixed sill for the variogram. - It needs to be in a fitting range for the var and nugget bounds. - If variance or nugget are not selected for estimation, - the nugget will be recalculated to fulfill: - - * sill = var + nugget - * if the variance is bigger than the sill, - nugget will bet set to its lower bound - and the variance will be set to the fitting partial sill. 
- - If variance is deselected, it needs to be less than the sill, - otherwise a ValueError comes up. Same for nugget. - If sill=False, it will be deselected from estimation - and set to the current sill of the model. - Then, the procedure above is applied. - Default: None - init_guess : :class:`str` or :class:`dict`, optional - Initial guess for the estimation. Either: - - * "default": using the default values of the covariance model - ("len_scale" will be mean of given bin centers; - "var" and "nugget" will be mean of given variogram values - (if in given bounds)) - * "current": using the current values of the covariance model - * dict: dictionary with parameter names and given value - (separate "default" can bet set to "default" or "current" for - unspecified values to get same behavior as given above - ("default" by default)) - Example: ``{"len_scale": 10, "default": "current"}`` - - Default: "default" - weights : :class:`str`, :class:`numpy.ndarray`, :class:`callable`optional - Weights applied to each point in the estimation. Either: - - * 'inv': inverse distance ``1 / (x_data + 1)`` - * list: weights given per bin - * callable: function applied to x_data - - If callable, it must take a 1-d ndarray. Then ``weights = f(x_data)``. - Default: None - method : {'trf', 'dogbox'}, optional - Algorithm to perform minimization. - - * 'trf' : Trust Region Reflective algorithm, particularly suitable - for large sparse problems with bounds. Generally robust method. - * 'dogbox' : dogleg algorithm with rectangular trust regions, - typical use case is small problems with bounds. Not recommended - for problems with rank-deficient Jacobian. - - Default: 'trf' - loss : :class:`str` or :class:`callable`, optional - Determines the loss function in scipys curve_fit. - The following keyword values are allowed: - - * 'linear' (default) : ``rho(z) = z``. Gives a standard - least-squares problem. - * 'soft_l1' : ``rho(z) = 2 * ((1 + z)**0.5 - 1)``. The smooth - approximation of l1 (absolute value) loss. Usually a good - choice for robust least squares. - * 'huber' : ``rho(z) = z if z <= 1 else 2*z**0.5 - 1``. Works - similarly to 'soft_l1'. - * 'cauchy' : ``rho(z) = ln(1 + z)``. Severely weakens outliers - influence, but may cause difficulties in optimization process. - * 'arctan' : ``rho(z) = arctan(z)``. Limits a maximum loss on - a single residual, has properties similar to 'cauchy'. - - If callable, it must take a 1-d ndarray ``z=f**2`` and return an - array_like with shape (3, m) where row 0 contains function values, - row 1 contains first derivatives and row 2 contains second - derivatives. Default: 'soft_l1' - max_eval : :class:`int` or :any:`None`, optional - Maximum number of function evaluations before the termination. - If None (default), the value is chosen automatically: 100 * n. - return_r2 : :class:`bool`, optional - Whether to return the r2 score of the estimation. - Default: False - curve_fit_kwargs : :class:`dict`, optional - Other keyword arguments passed to scipys curve_fit. Default: None - **para_select - You can deselect parameters from fitting, by setting - them "False" using their names as keywords. - You could also pass fixed values for each parameter. - Then these values will be applied and the involved parameters wont - be fitted. - By default, all parameters are fitted. - - Returns - ------- - fit_para : :class:`dict` - Dictionary with the fitted parameter values - pcov : :class:`numpy.ndarray` - The estimated covariance of `popt` from - :any:`scipy.optimize.curve_fit`. 
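# Hedged usage sketch of the fitting routine documented above, called through
# the CovModel method that wraps it (assumed gstools API; data are made up):
import numpy as np
import gstools as gs
bin_center = np.array([1.0, 2.0, 3.0, 4.0, 6.0, 8.0])
gamma = np.array([0.3, 0.6, 0.8, 0.9, 1.0, 1.0])
model = gs.Stable(dim=2)
fit_para, pcov = model.fit_variogram(bin_center, gamma, nugget=False, loss="soft_l1")
perr = np.sqrt(np.diag(pcov))  # one-standard-deviation errors of the fitted parameters
print(fit_para)  # e.g. {'var': ..., 'len_scale': ..., 'nugget': 0.0, 'alpha': ...}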
- To compute one standard deviation errors - on the parameters use ``perr = np.sqrt(np.diag(pcov))``. - r2_score : :class:`float`, optional - r2 score of the curve fitting results. Only if return_r2 is True. - - Notes - ----- - You can set the bounds for each parameter by accessing - :any:`CovModel.set_arg_bounds`. - - The fitted parameters will be instantly set in the model. - """ - # preprocess selected parameters - para, sill, constrain_sill, anis = _pre_para( - model, para_select, sill, anis - ) - # check curve_fit kwargs - curve_fit_kwargs = {} if curve_fit_kwargs is None else curve_fit_kwargs - # check method - if method not in ["trf", "dogbox"]: - raise ValueError("fit: method needs to be either 'trf' or 'dogbox'") - # prepare variogram data - # => concatenate directional variograms to have a 1D array for x and y - x_data, y_data, is_dir_vario = _check_vario(model, x_data, y_data) - # prepare init guess dictionary - init_guess = _pre_init_guess( - model, init_guess, np.mean(x_data), np.mean(y_data) - ) - # only fit anisotropy if a directional variogram was given - anis &= is_dir_vario - # set weights - _set_weights(model, weights, x_data, curve_fit_kwargs, is_dir_vario) - # set the lower/upper boundaries for the variogram-parameters - bounds, init_guess_list = _init_curve_fit_para( - model, para, init_guess, constrain_sill, sill, anis - ) - # create the fitting curve - curve_fit_kwargs["f"] = _get_curve( - model, para, constrain_sill, sill, anis, is_dir_vario - ) - # set the remaining kwargs for curve_fit - curve_fit_kwargs["bounds"] = bounds - curve_fit_kwargs["p0"] = init_guess_list - curve_fit_kwargs["xdata"] = x_data - curve_fit_kwargs["ydata"] = y_data - curve_fit_kwargs["loss"] = loss - curve_fit_kwargs["max_nfev"] = max_eval - curve_fit_kwargs["method"] = method - # fit the variogram - popt, pcov = curve_fit(**curve_fit_kwargs) - # convert the results - fit_para = _post_fitting(model, para, popt, anis, is_dir_vario) - # calculate the r2 score if wanted - if return_r2: - return fit_para, pcov, _r2_score(model, x_data, y_data, is_dir_vario) - return fit_para, pcov - - -def _pre_para(model, para_select, sill, anis): - """Preprocess selected parameters.""" - var_last = False - var_tmp = 0.0 # init value - for par in para_select: - if par not in model.arg_bounds: - raise ValueError(f"fit: unknown parameter in selection: {par}") - if not isinstance(para_select[par], bool): - if par == "var": - var_last = True - var_tmp = float(para_select[par]) - else: - setattr(model, par, float(para_select[par])) - para_select[par] = False - # set variance last due to possible recalculations - if var_last: - model.var = var_tmp - # remove those that were set to True - para_select = {k: v for k, v in para_select.items() if not v} - # handling the sill - sill = None if (isinstance(sill, bool) and sill) else sill - if sill is not None: - sill = model.sill if isinstance(sill, bool) else float(sill) - constrain_sill = True - sill_low = model.arg_bounds["var"][0] + model.arg_bounds["nugget"][0] - sill_up = model.arg_bounds["var"][1] + model.arg_bounds["nugget"][1] - if not sill_low <= sill <= sill_up: - raise ValueError("fit: sill out of bounds.") - if "var" in para_select and "nugget" in para_select: - if model.var > sill: - model.nugget = model.arg_bounds["nugget"][0] - model.var = sill - model.nugget - else: - model.nugget = sill - model.var - elif "var" in para_select: - if model.var > sill: - raise ValueError( - "fit: if sill is fixed and variance deselected, " - "the set variance should be less 
than the given sill." - ) - para_select["nugget"] = False - model.nugget = sill - model.var - elif "nugget" in para_select: - if model.nugget > sill: - raise ValueError( - "fit: if sill is fixed and nugget deselected, " - "the set nugget should be less than the given sill." - ) - para_select["var"] = False - model.var = sill - model.nugget - else: - # deselect the nugget, to recalculate it accordingly - # nugget = sill - var - para_select["nugget"] = False - else: - constrain_sill = False - # select all parameters to be fitted - para = {par: True for par in DEFAULT_PARA} - para.update({opt: True for opt in model.opt_arg}) - # now deselect unwanted parameters - para.update(para_select) - # check if anisotropy should be fitted or set - if not isinstance(anis, bool): - model.anis = anis - anis = False - return para, sill, constrain_sill, anis - - -def _pre_init_guess(model, init_guess, mean_x=1.0, mean_y=1.0): - # init guess should be a dict - if not isinstance(init_guess, dict): - init_guess = {"default": init_guess} - # "default" init guess is the respective default value - default_guess = init_guess.pop("default", "default") - if default_guess not in ["default", "current"]: - raise ValueError(f"fit_variogram: unknown def. guess: {default_guess}") - default = default_guess == "default" - # check invalid names for given init guesses - invalid_para = set(init_guess) - set(model.iso_arg + ["anis"]) - if invalid_para: - raise ValueError(f"fit_variogram: unknown init guess: {invalid_para}") - bnd = model.arg_bounds - # default length scale is mean of given bin centers (respecting "rescale") - init_guess.setdefault( - "len_scale", mean_x * model.rescale if default else model.len_scale - ) - # init guess for variance and nugget is mean of given variogram - for par in ["var", "nugget"]: - init_guess.setdefault(par, mean_y if default else getattr(model, par)) - # anis setting - init_guess.setdefault( - "anis", default_arg_from_bounds(bnd["anis"]) if default else model.anis - ) - # correctly handle given values for anis (need a list of values) - init_guess["anis"] = list(set_anis(model.dim, init_guess["anis"])) - # set optional arguments - for opt in model.opt_arg: - init_guess.setdefault( - opt, - ( - default_arg_from_bounds(bnd[opt]) - if default - else getattr(model, opt) - ), - ) - # convert all init guesses to float (except "anis") - for arg in model.iso_arg: - init_guess[arg] = float(init_guess[arg]) - return init_guess - - -def _check_vario(model, x_data, y_data): - # prepare variogram data - x_data = np.asarray(x_data).reshape(-1) - y_data = np.asarray(y_data).reshape(-1) - # if multiple variograms are given, they will be interpreted - # as directional variograms along the main rotated axes of the model - is_dir_vario = False - if model.dim > 1 and x_data.size * model.dim == y_data.size: - is_dir_vario = True - # concatenate multiple variograms - x_data = np.tile(x_data, model.dim) - elif x_data.size != y_data.size: - raise ValueError( - "CovModel.fit_variogram: Wrong number of empirical variograms! " - "Either provide only one variogram to fit an isotropic model, " - "or directional ones for all main axes to fit anisotropy." - ) - if is_dir_vario and model.latlon: - raise ValueError( - "CovModel.fit_variogram: lat-lon models don't support anisotropy." 
- ) - if model.latlon: - # convert to yadrenko model - x_data = great_circle_to_chordal(x_data, model.geo_scale) - return x_data, y_data, is_dir_vario - - -def _set_weights(model, weights, x_data, curve_fit_kwargs, is_dir_vario): - if weights is not None: - if callable(weights): - weights = 1.0 / weights(x_data) - elif isinstance(weights, str) and weights == "inv": - weights = 1.0 + x_data - else: - if is_dir_vario and weights.size * model.dim == x_data.size: - weights = np.tile(weights, model.dim) - weights = 1.0 / np.asarray(weights).reshape(-1) - curve_fit_kwargs["sigma"] = weights - curve_fit_kwargs["absolute_sigma"] = True - - -def _init_curve_fit_para(model, para, init_guess, constrain_sill, sill, anis): - """Create initial guess and bounds for fitting.""" - low_bounds = [] - top_bounds = [] - init_guess_list = [] - for par in DEFAULT_PARA: - if para[par]: - low_bounds.append(model.arg_bounds[par][0]) - if par == "var" and constrain_sill: # var <= sill in this case - top_bounds.append(sill) - else: - top_bounds.append(model.arg_bounds[par][1]) - init_guess_list.append( - _init_guess( - bounds=[low_bounds[-1], top_bounds[-1]], - default=init_guess[par], - ) - ) - for opt in model.opt_arg: - if para[opt]: - low_bounds.append(model.arg_bounds[opt][0]) - top_bounds.append(model.arg_bounds[opt][1]) - init_guess_list.append( - _init_guess( - bounds=[low_bounds[-1], top_bounds[-1]], - default=init_guess[opt], - ) - ) - if anis: - for i in range(model.dim - 1): - low_bounds.append(model.anis_bounds[0]) - top_bounds.append(model.anis_bounds[1]) - init_guess_list.append( - _init_guess( - bounds=[low_bounds[-1], top_bounds[-1]], - default=init_guess["anis"][i], - ) - ) - return (low_bounds, top_bounds), init_guess_list - - -def _init_guess(bounds, default): - """Proper determination of initial guess.""" - if bounds[0] < default < bounds[1]: - return default - return default_arg_from_bounds(bounds) - - -def _get_curve(model, para, constrain_sill, sill, anis, is_dir_vario): - """Create the curve for scipys curve_fit.""" - var_save = model.var - - # we need arg1, otherwise curve_fit throws an error (bug?!) 
- def curve(x, arg1, *args): - """Adapted Variogram function.""" - args = (arg1,) + args - para_skip = 0 - opt_skip = 0 - if para["var"]: - var_tmp = args[para_skip] - if constrain_sill: - nugget_tmp = sill - var_tmp - # punishment, if resulting nugget out of range for fixed sill - if check_arg_in_bounds(model, "nugget", nugget_tmp) > 0: - return np.full_like(x, np.inf) - # nugget estimation deselected in this case - model.nugget = nugget_tmp - para_skip += 1 - if para["len_scale"]: - model.len_scale = args[para_skip] - para_skip += 1 - if para["nugget"]: - model.nugget = args[para_skip] - para_skip += 1 - for opt in model.opt_arg: - if para[opt]: - setattr(model, opt, args[para_skip + opt_skip]) - opt_skip += 1 - # set var at last because of var_factor (other parameter needed) - if para["var"]: - model.var = var_tmp - # needs to be reset for TPL models when len_scale was changed - else: - model.var = var_save - if is_dir_vario: - if anis: - model.anis = args[1 - model.dim :] - xs = x[: x.size // model.dim] - out = np.array([], dtype=np.double) - for i in range(model.dim): - out = np.concatenate((out, model.vario_axis(xs, axis=i))) - return out - return model.variogram(x) - - return curve - - -def _post_fitting(model, para, popt, anis, is_dir_vario): - """Postprocess fitting results and application to model.""" - fit_para = {} - para_skip = 0 - opt_skip = 0 - var_tmp = 0.0 # init value - for par in DEFAULT_PARA: - if para[par]: - if par == "var": # set variance last - var_tmp = popt[para_skip] - else: - setattr(model, par, popt[para_skip]) - fit_para[par] = popt[para_skip] - para_skip += 1 - else: - fit_para[par] = getattr(model, par) - for opt in model.opt_arg: - if para[opt]: - setattr(model, opt, popt[para_skip + opt_skip]) - fit_para[opt] = popt[para_skip + opt_skip] - opt_skip += 1 - else: - fit_para[opt] = getattr(model, opt) - if is_dir_vario: - if anis: - model.anis = popt[1 - model.dim :] - fit_para["anis"] = model.anis - # set var at last because of var_factor (other parameter needed) - if para["var"]: - model.var = var_tmp - return fit_para - - -def _r2_score(model, x_data, y_data, is_dir_vario): - """Calculate the R2 score.""" - if is_dir_vario: - xs = x_data[: x_data.size // model.dim] - vario = np.array([], dtype=np.double) - for i in range(model.dim): - vario = np.concatenate((vario, model.vario_axis(xs, axis=i))) - else: - vario = model.variogram(x_data) - residuals = y_data - vario - ss_res = np.sum(residuals**2) - ss_tot = np.sum((y_data - np.mean(y_data)) ** 2) - return 1.0 - (ss_res / ss_tot) - - -def logistic_weights(p=0.1, mean=0.7): # pragma: no cover - """ - Return a logistic weights function. - - Parameters - ---------- - p : :class:`float`, optional - Parameter for the growth rate. - Within this percentage of the data range, the function will - be in the upper resp. lower percentile p. The default is 0.1. - mean : :class:`float`, optional - Percentage of the data range, where this function has its - sigmoid's midpoint. The default is 0.7. - - Returns - ------- - callable - Weighting function. 
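# Sketch of the callable `weights` option described in the fit_variogram
# docstring above, using the logistic_weights helper defined in this module
# (the import path and the data are assumptions for illustration):
import numpy as np
import gstools as gs
from gstools.covmodel.fit import logistic_weights
bin_center = np.array([1.0, 2.0, 3.0, 4.0, 6.0, 8.0])
gamma = np.array([0.3, 0.6, 0.8, 0.9, 1.0, 1.0])
model = gs.Exponential(dim=2)
fit_para, pcov = model.fit_variogram(
    bin_center, gamma, weights=logistic_weights(p=0.1, mean=0.7)
)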
- """ - - # define the callable weights function - def func(x_data): - """Callable function for the weights.""" - x_range = np.amax(x_data) - np.amin(x_data) - # logit function for growth rate - growth = np.log(p / (1 - p)) / (p * x_range) - x_mean = mean * x_range + np.amin(x_data) - return 1.0 / (1.0 + np.exp(growth * (x_mean - x_data))) - - return func diff --git a/src/gstools_cython/covmodel/models.py b/src/gstools_cython/covmodel/models.py deleted file mode 100644 index b1a9d68e..00000000 --- a/src/gstools_cython/covmodel/models.py +++ /dev/null @@ -1,960 +0,0 @@ -""" -GStools subpackage providing different covariance models. - -.. currentmodule:: gstools.covmodel.models - -The following classes are provided - -.. autosummary:: - Gaussian - Exponential - Matern - Integral - Stable - Rational - Cubic - Linear - Circular - Spherical - HyperSpherical - SuperSpherical - JBessel -""" - -# pylint: disable=C0103, E1101, R0201 -import warnings - -import numpy as np -from scipy import special as sps - -from gstools.covmodel.base import CovModel -from gstools.covmodel.tools import AttributeWarning -from gstools.tools.special import exp_int, inc_gamma_low - -__all__ = [ - "Gaussian", - "Exponential", - "Matern", - "Integral", - "Stable", - "Rational", - "Cubic", - "Linear", - "Circular", - "Spherical", - "HyperSpherical", - "SuperSpherical", - "JBessel", -] - - -class Gaussian(CovModel): - r"""The Gaussian covariance model. - - Notes - ----- - This model is given by the following variogram [Webster2007]_: - - .. math:: - \gamma(r)= - \sigma^{2} - \left(1-\exp\left(-\left(s\cdot\frac{r}{\ell}\right)^{2}\right)\right)+n - - Where the standard rescale factor is :math:`s=\frac{\sqrt{\pi}}{2}`. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. (2007) - """ - - def cor(self, h): - """Gaussian normalized correlation function.""" - return np.exp(-(h**2)) - - def default_rescale(self): - """Gaussian rescaling factor to result in integral scale.""" - return np.sqrt(np.pi) / 2.0 - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - return (self.len_rescaled / 2.0 / np.sqrt(np.pi)) ** self.dim * np.exp( - -((k * self.len_rescaled / 2.0) ** 2) - ) - - def spectral_rad_cdf(self, r): - """Gaussian radial spectral cdf.""" - r = np.asarray(r, dtype=np.double) - if self.dim == 1: - return sps.erf(r * self.len_rescaled / 2.0) - if self.dim == 2: - return 1.0 - np.exp(-((r * self.len_rescaled / 2.0) ** 2)) - if self.dim == 3: - return sps.erf( - r * self.len_rescaled / 2.0 - ) - r * self.len_rescaled / np.sqrt(np.pi) * np.exp( - -((r * self.len_rescaled / 2.0) ** 2) - ) - return None # pragma: no cover - - def spectral_rad_ppf(self, u): - """Gaussian radial spectral ppf. - - Notes - ----- - Not defined for 3D. - """ - u = np.asarray(u, dtype=np.double) - if self.dim == 1: - return 2.0 / self.len_rescaled * sps.erfinv(u) - if self.dim == 2: - return 2.0 / self.len_rescaled * np.sqrt(-np.log(1.0 - u)) - return None # pragma: no cover - - def _has_cdf(self): - return self.dim in [1, 2, 3] - - def _has_ppf(self): - return self.dim in [1, 2] - - def calc_integral_scale(self): # noqa: D102 - return self.len_rescaled * np.sqrt(np.pi) / 2.0 - - -class Exponential(CovModel): - r"""The Exponential covariance model. - - Notes - ----- - This model is given by the following variogram [Webster2007]_: - - .. 
math:: - \gamma(r)= - \sigma^{2} - \left(1-\exp\left(-s\cdot\frac{r}{\ell}\right)\right)+n - - Where the standard rescale factor is :math:`s=1`. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. (2007) - """ - - def cor(self, h): - """Exponential normalized correlation function.""" - return np.exp(-h) - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - return ( - self.len_rescaled**self.dim - * sps.gamma((self.dim + 1) / 2.0) - / (np.pi * (1.0 + (k * self.len_rescaled) ** 2)) - ** ((self.dim + 1) / 2.0) - ) - - def spectral_rad_cdf(self, r): - """Exponential radial spectral cdf.""" - r = np.asarray(r, dtype=np.double) - if self.dim == 1: - return np.arctan(r * self.len_rescaled) * 2.0 / np.pi - if self.dim == 2: - return 1.0 - 1.0 / np.sqrt(1.0 + (r * self.len_rescaled) ** 2) - if self.dim == 3: - return ( - ( - np.arctan(r * self.len_rescaled) - - r - * self.len_rescaled - / (1.0 + (r * self.len_rescaled) ** 2) - ) - * 2.0 - / np.pi - ) - return None # pragma: no cover - - def spectral_rad_ppf(self, u): - """Exponential radial spectral ppf. - - Notes - ----- - Not defined for 3D. - """ - u = np.asarray(u, dtype=np.double) - if self.dim == 1: - return np.tan(np.pi / 2 * u) / self.len_rescaled - if self.dim == 2: - u_power = np.divide( - 1, - u**2, - out=np.full_like(u, np.inf), - where=np.logical_not(np.isclose(u, 0)), - ) - return np.sqrt(u_power - 1.0) / self.len_rescaled - return None # pragma: no cover - - def _has_cdf(self): - return self.dim in [1, 2, 3] - - def _has_ppf(self): - return self.dim in [1, 2] - - def calc_integral_scale(self): # noqa: D102 - return self.len_rescaled - - -class Stable(CovModel): - r"""The stable covariance model. - - Notes - ----- - This model is given by the following correlation function - [Wackernagel2003]_: - - .. math:: - \rho(r) = - \exp\left(- \left(s\cdot\frac{r}{\ell}\right)^{\alpha}\right) - - Where the standard rescale factor is :math:`s=1`. - :math:`\alpha` is a shape parameter with :math:`\alpha\in(0,2]` - - References - ---------- - .. [Wackernagel2003] Wackernagel, H. "Multivariate geostatistics", - Springer, Berlin, Heidelberg (2003) - - Other Parameters - ---------------- - alpha : :class:`float`, optional - Shape parameter. Standard range: ``(0, 2]`` - Default: ``1.5`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"alpha": 1.5}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"alpha": 1.5} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. - - * ``{"alpha": [0, 2, "oc"]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"alpha": [0, 2, "oc"]} - - def check_opt_arg(self): - """Check the optional arguments. - - Warns - ----- - alpha - If alpha is < 0.3, the model tends to a nugget model and gets - numerically unstable. - """ - if self.alpha < 0.3: - warnings.warn( - "Stable: parameter 'alpha' is < 0.3, " - "count with unstable results", - AttributeWarning, - ) - - def cor(self, h): - r"""Stable normalized correlation function.""" - return np.exp(-np.power(h, self.alpha)) - - def calc_integral_scale(self): # noqa: D102 - return self.len_rescaled * sps.gamma(1.0 + 1.0 / self.alpha) - - -class Matern(CovModel): - r"""The Matérn covariance model. - - Notes - ----- - This model is given by the following correlation function [Rasmussen2003]_: - - .. 
math:: - \rho(r) = - \frac{2^{1-\nu}}{\Gamma\left(\nu\right)} \cdot - \left(\sqrt{\nu}\cdot s\cdot\frac{r}{\ell}\right)^{\nu} \cdot - \mathrm{K}_{\nu}\left(\sqrt{\nu}\cdot s\cdot\frac{r}{\ell}\right) - - Where the standard rescale factor is :math:`s=1`. - :math:`\Gamma` is the gamma function and :math:`\mathrm{K}_{\nu}` - is the modified Bessel function of the second kind. - - :math:`\nu` is a shape parameter and should be >= 0.2. - - If :math:`\nu > 20`, a gaussian model is used, since it represents - the limiting case: - - .. math:: - \rho(r) = - \exp\left(-\left(s\cdot\frac{r}{2\ell}\right)^2\right) - - References - ---------- - .. [Rasmussen2003] Rasmussen, C. E., - "Gaussian processes in machine learning." Summer school on - machine learning. Springer, Berlin, Heidelberg, (2003) - - Other Parameters - ---------------- - nu : :class:`float`, optional - Shape parameter. Standard range: ``[0.2, 30]`` - Default: ``1.0`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"nu": 1.0}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"nu": 1.0} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. - - * ``{"nu": [0.2, 30.0, "cc"]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"nu": [0.2, 30.0, "cc"]} - - def cor(self, h): - """Matérn normalized correlation function.""" - h = np.asarray(np.abs(h), dtype=np.double) - # for nu > 20 we just use the gaussian model - if self.nu > 20.0: - return np.exp(-((h / 2.0) ** 2)) - # calculate by log-transformation to prevent numerical errors - h_gz = h[h > 0.0] - res = np.ones_like(h) - res[h > 0.0] = np.exp( - (1.0 - self.nu) * np.log(2) - - sps.loggamma(self.nu) - + self.nu * np.log(np.sqrt(self.nu) * h_gz) - ) * sps.kv(self.nu, np.sqrt(self.nu) * h_gz) - # if nu >> 1 we get errors for the farfield, there 0 is approached - res[np.logical_not(np.isfinite(res))] = 0.0 - # covariance is positive - res = np.maximum(res, 0.0) - return res - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - x = (k * self.len_rescaled) ** 2 - # for nu > 20 we just use an approximation of the gaussian model - if self.nu > 20.0: - return ( - (self.len_rescaled / np.sqrt(np.pi)) ** self.dim - * np.exp(-x) - * (1 + 0.5 * x**2 / self.nu) - * np.sqrt(1 + x / self.nu) ** (-self.dim) - ) - return (self.len_rescaled / np.sqrt(np.pi)) ** self.dim * np.exp( - -(self.nu + self.dim / 2.0) * np.log(1.0 + x / self.nu) - + sps.loggamma(self.nu + self.dim / 2.0) - - sps.loggamma(self.nu) - - self.dim * np.log(np.sqrt(self.nu)) - ) - - def calc_integral_scale(self): # noqa: D102 - return ( - self.len_rescaled - * np.pi - / np.sqrt(self.nu) - / sps.beta(self.nu, 0.5) - ) - - -class Integral(CovModel): - r"""The Exponential Integral covariance model. - - Notes - ----- - This model is given by the following correlation function [Mueller2021]_: - - .. math:: - \rho(r) = - \frac{\nu}{2}\cdot - E_{1+\frac{\nu}{2}}\left( \left( s\cdot\frac{r}{\ell} \right)^2 \right) - - Where the standard rescale factor is :math:`s=1`. - :math:`E_s(x)` is the exponential integral. - - :math:`\nu` is a shape parameter (1 by default). - - For :math:`\nu \to \infty`, a gaussian model is approached, since it represents - the limiting case: - - .. math:: - \rho(r) = - \exp\left(-\left(s\cdot\frac{r}{\ell}\right)^2\right) - - References - ---------- - .. 
[Mueller2021] Müller, S., Heße, F., Attinger, S., and Zech, A., - "The extended generalized radial flow model and effective - conductivity for truncated power law variograms", - Adv. Water Resour., 156, 104027, (2021) - - Other Parameters - ---------------- - nu : :class:`float`, optional - Shape parameter. Standard range: ``(0.0, 50]`` - Default: ``1.0`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"nu": 1.0}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"nu": 1.0} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. - - * ``{"nu": [0.0, 50.0, "oc"]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"nu": [0.0, 50.0, "oc"]} - - def cor(self, h): - """Exponential Integral normalized correlation function.""" - h = np.asarray(h, dtype=np.double) - return 0.5 * self.nu * exp_int(1.0 + 0.5 * self.nu, h**2) - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - fac = (0.5 * self.len_rescaled / np.sqrt(np.pi)) ** self.dim - lim = fac * self.nu / (self.nu + self.dim) - # for nu > 50 we just use an approximation of the gaussian model - if self.nu > 50.0: - x = (k * self.len_rescaled / 2) ** 2 - return lim * np.exp(-x) * (1 + 2 * x / (self.nu + self.dim + 2)) - # separate calculation at origin - s = (self.nu + self.dim) / 2 - res = np.empty_like(k) - k_gz = np.logical_not(np.isclose(k, 0)) - x = (k[k_gz] * self.len_rescaled / 2) ** 2 - # limit at k=0 (inc_gamma_low(s, x) / x**s -> 1/s for x -> 0) - res[np.logical_not(k_gz)] = lim - res[k_gz] = 0.5 * self.nu * fac / x**s * inc_gamma_low(s, x) - return res - - def calc_integral_scale(self): # noqa: D102 - return ( - self.len_rescaled * self.nu * np.sqrt(np.pi) / (2 * self.nu + 2.0) - ) - - -class Rational(CovModel): - r"""The rational quadratic covariance model. - - Notes - ----- - This model is given by the following correlation function [Rasmussen2003]_: - - .. math:: - \rho(r) = - \left(1 + \frac{1}{\alpha} \cdot - \left(s\cdot\frac{r}{\ell}\right)^2\right)^{-\alpha} - - Where the standard rescale factor is :math:`s=1`. - :math:`\alpha` is a shape parameter and should be > 0.5. - - For :math:`\alpha\to\infty` this model converges to the Gaussian model: - - .. math:: - \rho(r)= - \exp\left(-\left(s\cdot\frac{r}{\ell}\right)^{2}\right) - - References - ---------- - .. [Rasmussen2003] Rasmussen, C. E., - "Gaussian processes in machine learning." Summer school on - machine learning. Springer, Berlin, Heidelberg, (2003) - - Other Parameters - ---------------- - alpha : :class:`float`, optional - Shape parameter. Standard range: ``[0.5, 50]`` - Default: ``1.0`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"alpha": 1.0}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"alpha": 1.0} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. 
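# Shape parameters such as alpha (Stable, Rational) or nu (Matern, Integral)
# are optional arguments set at construction (a sketch, assumed gstools API):
import gstools as gs
m = gs.Stable(dim=2, len_scale=4.0, alpha=1.0)  # alpha=1 reduces to the Exponential model
print(m.opt_arg)           # ['alpha']
print(m.correlation(4.0))  # exp(-1) ~ 0.368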
- - * ``{"alpha": [0.5, 50.0]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"alpha": [0.5, 50.0]} - - def cor(self, h): - """Rational normalized correlation function.""" - return np.power(1 + h**2 / self.alpha, -self.alpha) - - def calc_integral_scale(self): # noqa: D102 - return ( - self.len_rescaled - * np.sqrt(np.pi * self.alpha) - * sps.gamma(self.alpha - 0.5) - / sps.gamma(self.alpha) - / 2.0 - ) - - -class Cubic(CovModel): - r"""The Cubic covariance model. - - A model with reverse curvature near the origin and a finite range of - correlation. - - Notes - ----- - This model is given by the following correlation function [Chiles2009]_: - - .. math:: - \rho(r) = - \begin{cases} - 1- 7 \left(s\cdot\frac{r}{\ell}\right)^{2} - + \frac{35}{4} \left(s\cdot\frac{r}{\ell}\right)^{3} - - \frac{7}{2} \left(s\cdot\frac{r}{\ell}\right)^{5} - + \frac{3}{4} \left(s\cdot\frac{r}{\ell}\right)^{7} - & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - - References - ---------- - .. [Chiles2009] Chiles, J. P., & Delfiner, P., - "Geostatistics: modeling spatial uncertainty" (Vol. 497), - John Wiley & Sons. (2009) - """ - - def cor(self, h): - """Spherical normalized correlation function.""" - h = np.minimum(np.abs(h, dtype=np.double), 1.0) - return 1.0 - 7 * h**2 + 8.75 * h**3 - 3.5 * h**5 + 0.75 * h**7 - - -class Linear(CovModel): - r"""The bounded linear covariance model. - - This model is derived from the relative intersection area of - two lines in 1D, where the middle points have a distance of :math:`r` - and the line lengths are :math:`\ell`. - - Notes - ----- - This model is given by the following correlation function [Webster2007]_: - - .. math:: - \rho(r) = - \begin{cases} - 1-s\cdot\frac{r}{\ell} & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. (2007) - """ - - def cor(self, h): - """Linear normalized correlation function.""" - return np.maximum(1 - np.abs(h, dtype=np.double), 0.0) - - def check_dim(self, dim): - """Linear model is only valid in 1D.""" - return dim < 2 - - -class Circular(CovModel): - r"""The circular covariance model. - - This model is derived as the relative intersection area of - two discs in 2D, where the middle points have a distance of :math:`r` - and the diameters are given by :math:`\ell`. - - Notes - ----- - This model is given by the following correlation function [Webster2007]_: - - .. math:: - \rho(r) = - \begin{cases} - \frac{2}{\pi}\cdot - \left( - \cos^{-1}\left(s\cdot\frac{r}{\ell}\right) - - s\cdot\frac{r}{\ell}\cdot\sqrt{1-\left(s\cdot\frac{r}{\ell}\right)^{2}} - \right) - & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. 
(2007) - """ - - def cor(self, h): - """Circular normalized correlation function.""" - h = np.asarray(np.abs(h), dtype=np.double) - res = np.zeros_like(h) - # arccos is instable around h=1 - h_l1 = h < 1.0 - h_low = h[h_l1] - res[h_l1] = ( - 2 / np.pi * (np.arccos(h_low) - h_low * np.sqrt(1 - h_low**2)) - ) - return res - - def check_dim(self, dim): - """Circular model is only valid in 1D and 2D.""" - return dim < 3 - - -class Spherical(CovModel): - r"""The Spherical covariance model. - - This model is derived from the relative intersection area of - two spheres in 3D, where the middle points have a distance of :math:`r` - and the diameters are given by :math:`\ell`. - - Notes - ----- - This model is given by the following correlation function [Webster2007]_: - - .. math:: - \rho(r) = - \begin{cases} - 1-\frac{3}{2}\cdot s\cdot\frac{r}{\ell} + - \frac{1}{2}\cdot\left(s\cdot\frac{r}{\ell}\right)^{3} - & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. (2007) - """ - - def cor(self, h): - """Spherical normalized correlation function.""" - h = np.minimum(np.abs(h, dtype=np.double), 1.0) - return 1.0 - 1.5 * h + 0.5 * h**3 - - def check_dim(self, dim): - """Spherical model is only valid in 1D, 2D and 3D.""" - return dim < 4 - - -class HyperSpherical(CovModel): - r"""The Hyper-Spherical covariance model. - - This model is derived from the relative intersection area of - two d-dimensional hyperspheres, - where the middle points have a distance of :math:`r` - and the diameters are given by :math:`\ell`. - - In 1D this is the Linear model, in 2D the Circular model - and in 3D the Spherical model. - - Notes - ----- - This model is given by the following correlation function [Matern1960]_: - - .. math:: - \rho(r) = - \begin{cases} - 1-s\cdot\frac{r}{\ell}\cdot\frac{ - _{2}F_{1}\left(\frac{1}{2},-\frac{d-1}{2},\frac{3}{2}, - \left(s\cdot\frac{r}{\ell}\right)^{2}\right)} - {_{2}F_{1}\left(\frac{1}{2},-\frac{d-1}{2},\frac{3}{2},1\right)} - & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - :math:`d` is the dimension. - - References - ---------- - .. [Matern1960] Matern B., "Spatial Variation", - Swedish National Institute for Forestry Research, (1960) - """ - - def cor(self, h): - """Hyper-Spherical normalized correlation function.""" - h = np.asarray(h, dtype=np.double) - res = np.zeros_like(h) - h_l1 = h < 1 - nu = (self.dim - 1.0) / 2.0 - fac = 1.0 / sps.hyp2f1(0.5, -nu, 1.5, 1) - res[h_l1] = 1 - h[h_l1] * fac * sps.hyp2f1(0.5, -nu, 1.5, h[h_l1] ** 2) - return res - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - res = np.empty_like(k) - kl = k * self.len_rescaled - kl_gz = np.logical_not(np.isclose(k, 0)) - res[kl_gz] = sps.gamma(self.dim / 2 + 1) / np.sqrt(np.pi) ** self.dim - res[kl_gz] *= sps.jv(self.dim / 2, kl[kl_gz] / 2) ** 2 - res[kl_gz] /= k[kl_gz] ** self.dim - res[np.logical_not(kl_gz)] = ( - (self.len_rescaled / 4) ** self.dim - / sps.gamma(self.dim / 2 + 1) - / np.sqrt(np.pi) ** self.dim - ) - return res - - -class SuperSpherical(CovModel): - r"""The Super-Spherical covariance model. 
- - This model is derived from the relative intersection area of - two d-dimensional hyperspheres, - where the middle points have a distance of :math:`r` - and the diameters are given by :math:`\ell`. - It is than valid in all lower dimensions. - By default it coincides with the Hyper-Spherical model. - - Notes - ----- - This model is given by the following correlation function [Matern1960]_: - - .. math:: - \rho(r) = - \begin{cases} - 1-s\cdot\frac{r}{\ell}\cdot\frac{ - _{2}F_{1}\left(\frac{1}{2},-\nu,\frac{3}{2}, - \left(s\cdot\frac{r}{\ell}\right)^{2}\right)} - {_{2}F_{1}\left(\frac{1}{2},-\nu,\frac{3}{2},1\right)} - & r<\frac{\ell}{s}\\ - 0 & r\geq\frac{\ell}{s} - \end{cases} - - Where the standard rescale factor is :math:`s=1`. - :math:`\nu\geq\frac{d-1}{2}` is a shape parameter. - - References - ---------- - .. [Matern1960] Matern B., "Spatial Variation", - Swedish National Institute for Forestry Research, (1960) - - Other Parameters - ---------------- - nu : :class:`float`, optional - Shape parameter. Standard range: ``[(dim-1)/2, 50]`` - Default: ``(dim-1)/2`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"nu": (dim-1)/2}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"nu": (self.dim - 1) / 2} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. - - * ``{"nu": [(dim-1)/2, 50.0]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"nu": [(self.dim - 1) / 2, 50.0]} - - def cor(self, h): - """Super-Spherical normalized correlation function.""" - h = np.asarray(h, dtype=np.double) - res = np.zeros_like(h) - h_l1 = h < 1 - fac = 1.0 / sps.hyp2f1(0.5, -self.nu, 1.5, 1.0) - res[h_l1] = 1.0 - h[h_l1] * fac * sps.hyp2f1( - 0.5, -self.nu, 1.5, h[h_l1] ** 2 - ) - return res - - -class JBessel(CovModel): - r"""The J-Bessel hole model. - - This covariance model is a valid hole model, meaning it has areas - of negative correlation but a valid spectral density. - - Notes - ----- - This model is given by the following correlation function [Chiles2009]_: - - .. math:: - \rho(r) = - \Gamma(\nu+1) \cdot - \frac{\mathrm{J}_{\nu}\left(s\cdot\frac{r}{\ell}\right)} - {\left(s\cdot\frac{r}{2\ell}\right)^{\nu}} - - Where the standard rescale factor is :math:`s=1`. - :math:`\Gamma` is the gamma function and :math:`\mathrm{J}_{\nu}` - is the Bessel functions of the first kind. - :math:`\nu\geq\frac{d}{2}-1` is a shape parameter, - which defaults to :math:`\nu=\frac{d}{2}`, - since the spectrum of the model gets instable for - :math:`\nu\to\frac{d}{2}-1`. - - For :math:`\nu=\frac{1}{2}` (valid in d=1,2,3) - we get the so-called 'Wave' model: - - .. math:: - \rho(r) = - \frac{\sin\left(s\cdot\frac{r}{\ell}\right)}{s\cdot\frac{r}{\ell}} - - References - ---------- - .. [Chiles2009] Chiles, J. P., & Delfiner, P., - "Geostatistics: modeling spatial uncertainty" (Vol. 497), - John Wiley & Sons. (2009) - - Other Parameters - ---------------- - nu : :class:`float`, optional - Shape parameter. Standard range: ``[dim/2 - 1, 50]`` - Default: ``dim/2`` - """ - - def default_opt_arg(self): - """Defaults for the optional arguments. - - * ``{"nu": dim/2}`` - - Returns - ------- - :class:`dict` - Defaults for optional arguments - """ - return {"nu": self.dim / 2} - - def default_opt_arg_bounds(self): - """Defaults for boundaries of the optional arguments. 
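# The nu = 1/2 case reduces to the 'Wave' model sin(x)/x stated in the
# JBessel notes above (a sketch, assumed gstools API):
import numpy as np
import gstools as gs
m = gs.JBessel(dim=1, nu=0.5, len_scale=1.0)
h = np.linspace(0.1, 10.0, 50)
print(np.allclose(m.correlation(h), np.sin(h) / h))  # True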
- - * ``{"nu": [dim/2 - 1, 50.0]}`` - - Returns - ------- - :class:`dict` - Boundaries for optional arguments - """ - return {"nu": [self.dim / 2 - 1, 50.0]} - - def check_opt_arg(self): - """Check the optional arguments. - - Warns - ----- - nu - If nu is close to dim/2 - 1, the model tends to get unstable. - """ - if abs(self.nu - self.dim / 2 + 1) < 0.01: - warnings.warn( - "JBessel: parameter 'nu' is close to d/2-1, " - "count with unstable results", - AttributeWarning, - ) - - def cor(self, h): - """J-Bessel correlation.""" - h = np.asarray(h, dtype=np.double) - h_gz = np.logical_not(np.isclose(h, 0)) - hh = h[h_gz] - res = np.ones_like(h) - nu = self.nu - res[h_gz] = sps.gamma(nu + 1) * sps.jv(nu, hh) / (hh / 2.0) ** nu - return res - - def spectral_density(self, k): # noqa: D102 - k = np.asarray(k, dtype=np.double) - k_ll = k < 1.0 / self.len_rescaled - kk = k[k_ll] - res = np.zeros_like(k) - # the model is degenerated for nu=d/2-1, so we tweak the spectral pdf - # and cut of the divisor at nu-(d/2-1)=0.01 (gamma(0.01) about 100) - res[k_ll] = ( - (self.len_rescaled / np.sqrt(np.pi)) ** self.dim - * sps.gamma(self.nu + 1.0) - / np.minimum(sps.gamma(self.nu - self.dim / 2 + 1), 100.0) - * (1.0 - (kk * self.len_rescaled) ** 2) ** (self.nu - self.dim / 2) - ) - return res diff --git a/src/gstools_cython/covmodel/plot.py b/src/gstools_cython/covmodel/plot.py deleted file mode 100644 index 32148c14..00000000 --- a/src/gstools_cython/covmodel/plot.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -GStools subpackage providing plotting routines for the covariance models. - -.. currentmodule:: gstools.covmodel.plot - -The following classes and functions are provided - -.. autosummary:: - :toctree: - - plot_variogram - plot_covariance - plot_correlation - plot_vario_yadrenko - plot_cov_yadrenko - plot_cor_yadrenko - plot_vario_axis - plot_cov_axis - plot_cor_axis - plot_vario_spatial - plot_cov_spatial - plot_cor_spatial - plot_spectrum - plot_spectral_density - plot_spectral_rad_pdf -""" - -# pylint: disable=C0103, C0415, E1130 -import numpy as np - -from gstools.tools.geometric import generate_grid -from gstools.tools.misc import get_fig_ax - -__all__ = [ - "plot_variogram", - "plot_covariance", - "plot_correlation", - "plot_vario_yadrenko", - "plot_cov_yadrenko", - "plot_cor_yadrenko", - "plot_vario_axis", - "plot_cov_axis", - "plot_cor_axis", - "plot_vario_spatial", - "plot_cov_spatial", - "plot_cor_spatial", - "plot_spectrum", - "plot_spectral_density", - "plot_spectral_rad_pdf", -] - - -# plotting routines ####################################################### - - -def _plot_spatial(dim, pos, field, fig, ax, temporal, **kwargs): - from gstools.field.plot import plot_1d, plot_nd - - if dim == 1: - return plot_1d(pos, field, fig, ax, temporal, **kwargs) - return plot_nd( - pos, field, "structured", fig, ax, temporal=temporal, **kwargs - ) - - -def plot_vario_spatial( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot spatial variogram of a given CovModel.""" - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(-x_max, x_max) + x_min - pos = [x_s] * model.dim - shp = tuple(len(p) for p in pos) - fld = model.vario_spatial(generate_grid(pos)).reshape(shp) - return _plot_spatial( - model.dim, pos, fld, fig, ax, model.temporal, **kwargs - ) - - -def plot_cov_spatial( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot spatial covariance of a given CovModel.""" - if x_max is None: - x_max = 3 * 
model.len_scale - x_s = np.linspace(-x_max, x_max) + x_min - pos = [x_s] * model.dim - shp = tuple(len(p) for p in pos) - fld = model.cov_spatial(generate_grid(pos)).reshape(shp) - return _plot_spatial( - model.dim, pos, fld, fig, ax, model.temporal, **kwargs - ) - - -def plot_cor_spatial( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot spatial correlation of a given CovModel.""" - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(-x_max, x_max) + x_min - pos = [x_s] * model.dim - shp = tuple(len(p) for p in pos) - fld = model.cor_spatial(generate_grid(pos)).reshape(shp) - return _plot_spatial( - model.dim, pos, fld, fig, ax, model.temporal, **kwargs - ) - - -def plot_variogram( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot variogram of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} variogram") - ax.plot(x_s, model.variogram(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_covariance( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot covariance of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} covariance") - ax.plot(x_s, model.covariance(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_correlation( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot correlation function of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} correlation") - ax.plot(x_s, model.correlation(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_vario_yadrenko( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot Yadrenko variogram of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = min(3 * model.len_scale, model.geo_scale * np.pi) - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} Yadrenko variogram") - ax.plot(x_s, model.vario_yadrenko(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_cov_yadrenko( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot Yadrenko covariance of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = min(3 * model.len_scale, model.geo_scale * np.pi) - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} Yadrenko covariance") - ax.plot(x_s, model.cov_yadrenko(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_cor_yadrenko( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot Yadrenko correlation function of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = min(3 * model.len_scale, model.geo_scale * np.pi) - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} Yadrenko correlation") - ax.plot(x_s, model.cor_yadrenko(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_vario_axis( - model, axis=0, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot variogram of a given CovModel.""" - 
fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} variogram on axis {axis}") - ax.plot(x_s, model.vario_axis(x_s, axis), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_cov_axis( - model, axis=0, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot variogram of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} covariance on axis {axis}") - ax.plot(x_s, model.cov_axis(x_s, axis), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_cor_axis( - model, axis=0, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot variogram of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 * model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} correlation on axis {axis}") - ax.plot(x_s, model.cor_axis(x_s, axis), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_spectrum( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot spectrum of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 / model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} {model.dim}D spectrum") - ax.plot(x_s, model.spectrum(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_spectral_density( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot spectral density of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 / model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} {model.dim}D spectral-density") - ax.plot(x_s, model.spectral_density(x_s), **kwargs) - ax.legend() - fig.show() - return ax - - -def plot_spectral_rad_pdf( - model, x_min=0.0, x_max=None, fig=None, ax=None, **kwargs -): # pragma: no cover - """Plot radial spectral pdf of a given CovModel.""" - fig, ax = get_fig_ax(fig, ax) - if x_max is None: - x_max = 3 / model.len_scale - x_s = np.linspace(x_min, x_max) - kwargs.setdefault("label", f"{model.name} {model.dim}D spectral-rad-pdf") - ax.plot(x_s, model.spectral_rad_pdf(x_s), **kwargs) - ax.legend() - fig.show() - return ax diff --git a/src/gstools_cython/covmodel/tools.py b/src/gstools_cython/covmodel/tools.py deleted file mode 100644 index dddeb441..00000000 --- a/src/gstools_cython/covmodel/tools.py +++ /dev/null @@ -1,647 +0,0 @@ -""" -GStools subpackage providing tools for the covariance-model. - -.. currentmodule:: gstools.covmodel.tools - -The following classes and functions are provided - -.. 
autosummary:: - AttributeWarning - rad_fac - set_opt_args - set_len_anis - check_bounds - check_arg_in_bounds - default_arg_from_bounds - spectral_rad_pdf - percentile_scale - set_arg_bounds - check_arg_bounds - set_dim - compare - model_repr -""" - -# pylint: disable=C0103, W0212 -import warnings - -import numpy as np -from hankel import SymmetricFourierTransform as SFT -from scipy import special as sps -from scipy.optimize import root - -from gstools.tools.geometric import no_of_angles, set_angles, set_anis -from gstools.tools.misc import list_format - -__all__ = [ - "AttributeWarning", - "rad_fac", - "set_opt_args", - "set_len_anis", - "set_model_angles", - "check_bounds", - "check_arg_in_bounds", - "default_arg_from_bounds", - "spectral_rad_pdf", - "percentile_scale", - "set_arg_bounds", - "check_arg_bounds", - "set_dim", - "compare", - "model_repr", -] - - -class AttributeWarning(UserWarning): - """Attribute warning for CovModel class.""" - - -def _init_subclass(cls): - """Initialize gstools covariance model.""" - - def variogram(self, r): - """Isotropic variogram of the model.""" - return self.var - self.covariance(r) + self.nugget - - def covariance(self, r): - """Covariance of the model.""" - return self.var * self.correlation(r) - - def correlation(self, r): - """Correlation function of the model.""" - return 1.0 - (self.variogram(r) - self.nugget) / self.var - - def correlation_from_cor(self, r): - """Correlation function of the model.""" - r = np.asarray(np.abs(r), dtype=np.double) - return self.cor(r / self.len_rescaled) - - def cor_from_correlation(self, h): - """Correlation taking a non-dimensional range.""" - h = np.asarray(np.abs(h), dtype=np.double) - return self.correlation(h * self.len_rescaled) - - abstract = True - if hasattr(cls, "cor"): - if not hasattr(cls, "correlation"): - cls.correlation = correlation_from_cor - abstract = False - else: - cls.cor = cor_from_correlation - if not hasattr(cls, "variogram"): - cls.variogram = variogram - else: - abstract = False - if not hasattr(cls, "covariance"): - cls.covariance = covariance - else: - abstract = False - if not hasattr(cls, "correlation"): - cls.correlation = correlation - else: - abstract = False - if abstract: - raise TypeError( - f"Can't instantiate class '{cls.__name__}', " - "without providing at least one of the methods " - "'cor', 'variogram', 'covariance' or 'correlation'." - ) - - -# Helping functions ########################################################### - - -def rad_fac(dim, r): - """Volume element of the n-dimensional spherical coordinates. - - Given as a factor for integration of a radial-symmetric function. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - r : :class:`numpy.ndarray` - Given radii. - """ - if dim == 1: - fac = 2.0 - elif dim == 2: - fac = 2 * np.pi * r - elif dim == 3: - fac = 4 * np.pi * r**2 - else: # pragma: no cover - fac = ( - dim - * r ** (dim - 1) - * np.sqrt(np.pi) ** dim - / sps.gamma(dim / 2 + 1) - ) - return fac - - -def set_opt_args(model, opt_arg): - """ - Set optional arguments in the model class. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - opt_arg : :class:`dict` - Dictionary with optional arguments. - - Raises - ------ - ValueError - When an optional argument has an already taken name. 
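# Consistency of variogram, covariance and correlation as wired up by
# _init_subclass above (a sketch, assumed gstools API):
import numpy as np
import gstools as gs
m = gs.Exponential(dim=1, var=2.0, len_scale=5.0, nugget=0.5)
r = np.array([0.0, 2.5, 5.0, 10.0])
print(np.allclose(m.variogram(r), m.var - m.covariance(r) + m.nugget))  # True
print(np.allclose(m.covariance(r), m.var * m.correlation(r)))           # True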
- """ - model._opt_arg = [] - # look up the defaults for the optional arguments (defined by the user) - default = model.default_opt_arg() - for opt_name in opt_arg: - if opt_name not in default: - warnings.warn( - f"The given optional argument '{opt_name}' " - "is unknown or has at least no defined standard value. " - "Or you made a Typo... hehe.", - AttributeWarning, - ) - # add the default values if not specified - for def_arg in default: - if def_arg not in opt_arg: - opt_arg[def_arg] = default[def_arg] - # save names of the optional arguments (sort them by name) - model._opt_arg = sorted(opt_arg) - # add the optional arguments as attributes to the class - for opt_name in opt_arg: - if opt_name in dir(model): # "dir" also respects properties - raise ValueError( - f"parameter '{opt_name}' has a 'bad' name, " - "since it is already present in " - "the class. It could not be added to the model." - ) - # Magic happens here - setattr(model, opt_name, float(opt_arg[opt_name])) - - -def set_len_anis(dim, len_scale, anis, latlon=False): - """Set the length scale and anisotropy factors for the given dimension. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - len_scale : :class:`float` or :class:`list` - the length scale of the SRF in x direction or in x- (y-, ...) direction - anis : :class:`float` or :class:`list` - the anisotropy of length scales along the transversal axes - latlon : :class:`bool`, optional - Whether the model is describing 2D fields on earths surface described - by latitude and longitude. In this case there is no spatial anisotropy. - Default: False - - Returns - ------- - len_scale : :class:`float` - the main length scale of the SRF in x direction - anis : :class:`list`, optional - the anisotropy of length scales along the transversal axes - - Notes - ----- - If ``len_scale`` is given by at least two values, - ``anis`` will be recalculated. - - If ``len_scale`` is given as list with to few values, the latter value will - be used for the remaining dimensions. (e.g. [l_1, l_2] in 3D is equal to - [l_1, l_2, l_2]) - - If to few ``anis`` values are given, the first dimensions will be filled - up with 1. (eg. anis=[e] in 3D is equal to anis=[1, e]) - """ - ls_tmp = np.array(len_scale, dtype=np.double) - ls_tmp = np.atleast_1d(ls_tmp)[:dim] - # use just one length scale (x-direction) - out_len_scale = ls_tmp[0] - # set the anisotropies in y- and z-direction according to the input - if len(ls_tmp) == 1: - out_anis = set_anis(dim, anis) - else: - # fill up length-scales with the latter len_scale, such that len()==dim - if len(ls_tmp) < dim: - ls_tmp = np.pad(ls_tmp, (0, dim - len(ls_tmp)), "edge") - # if multiple length-scales are given, calculate the anisotropies - out_anis = np.zeros(dim - 1, dtype=np.double) - for i in range(1, dim): - out_anis[i - 1] = ls_tmp[i] / ls_tmp[0] - # sanity check - for ani in out_anis: - if not ani > 0.0: - raise ValueError( - f"anisotropy-ratios needs to be > 0, got: {out_anis}" - ) - # no spatial anisotropy for latlon - if latlon: - out_anis[:2] = 1.0 - return out_len_scale, out_anis - - -def set_model_angles(dim, angles, latlon=False, temporal=False): - """Set the model angles for the given dimension. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the angles of the SRF - latlon : :class:`bool`, optional - Whether the model is describing 2D fields on earths surface described - by latitude and longitude. 
- Default: False - temporal : :class:`bool`, optional - Whether a time-dimension is appended. - Default: False - - Returns - ------- - angles : :class:`float` - the angles fitting to the dimension - - Notes - ----- - If too few angles are given, they are filled up with `0`. - """ - if latlon: - return np.array(no_of_angles(dim) * [0], dtype=np.double) - out_angles = set_angles(dim, angles) - if temporal: - # no rotation between spatial dimensions and temporal dimension - out_angles[no_of_angles(dim - 1) :] = 0.0 - return out_angles - - -def check_bounds(bounds): - """ - Check if given bounds are valid. - - Parameters - ---------- - bounds : list - bound can contain 2 to 3 values: - 1. lower bound - float - 2. upper bound - float - 3. Interval type (optional) - * "oo" : open - open - * "oc" : open - close - * "co" : close - open - * "cc" : close - close - """ - if len(bounds) not in (2, 3): - return False - if bounds[1] <= bounds[0]: - return False - if len(bounds) == 3 and bounds[2] not in ("oo", "oc", "co", "cc"): - return False - return True - - -def check_arg_in_bounds(model, arg, val=None): - """Check if given argument value is in bounds of the given model.""" - if arg not in model.arg_bounds: - raise ValueError(f"check bounds: unknown argument: {arg}") - bnd = list(model.arg_bounds[arg]) - val = getattr(model, arg) if val is None else val - val = np.asarray(val) - error_case = 0 - if len(bnd) == 2: - bnd.append("cc") # use closed intervals by default - if bnd[2][0] == "c": - if np.any(val < bnd[0]): - error_case = 1 - else: - if np.any(val <= bnd[0]): - error_case = 2 - if bnd[2][1] == "c": - if np.any(val > bnd[1]): - error_case = 3 - else: - if np.any(val >= bnd[1]): - error_case = 4 - return error_case - - -def default_arg_from_bounds(bounds): - """ - Determine a default value from given bounds. - - Parameters - ---------- - bounds : list - bounds for the value. - - Returns - ------- - float - Default value in the given bounds. - """ - if bounds[0] > -np.inf and bounds[1] < np.inf: - return (bounds[0] + bounds[1]) / 2.0 - if bounds[0] > -np.inf: - return bounds[0] + 1.0 - if bounds[1] < np.inf: - return bounds[1] - 1.0 - return 0.0 # pragma: no cover - - -# outsourced routines - - -def spectral_rad_pdf(model, r): - """ - Spectral radians PDF of a model. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - r : :class:`numpy.ndarray` - Given radii. - - Returns - ------- - :class:`numpy.ndarray` - PDF values. - - """ - r = np.asarray(np.abs(r), dtype=np.double) - if model.dim > 1: - r_gz = np.logical_not(np.isclose(r, 0)) - # to prevent numerical errors, we just calculate where r>0 - res = np.zeros_like(r, dtype=np.double) - res[r_gz] = rad_fac(model.dim, r[r_gz]) * np.abs( - model.spectral_density(r[r_gz]) - ) - else: - res = rad_fac(model.dim, r) * np.abs(model.spectral_density(r)) - # prevent numerical errors in hankel for small r values (set 0) - res[np.logical_not(np.isfinite(res))] = 0.0 - # prevent numerical errors in hankel for big r (set non-negative) - res = np.maximum(res, 0.0) - return res - - -def percentile_scale(model, per=0.9): - """ - Calculate the percentile scale of the isotrope model. - - This is the distance, where the given percentile of the variance - is reached by the variogram - - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - per : float, optional - Percentile to use. The default is 0.9. - - Raises - ------ - ValueError - When percentile is not in (0, 1). 
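# Sketch of percentile_scale for the Exponential model (assumed gstools API):
# 90 % of the variance is reached where 1 - exp(-r / len_scale) = 0.9,
# i.e. at r = len_scale * ln(10), about 23.03 here.
import gstools as gs
m = gs.Exponential(dim=1, len_scale=10.0)
print(m.percentile_scale(0.9))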
- - Returns - ------- - float - Percentile scale. - - """ - # check the given percentile - if not 0.0 < per < 1.0: - raise ValueError(f"percentile needs to be within (0, 1), got: {per}") - - # define a curve, that has its root at the wanted point - def curve(x): - return 1.0 - model.correlation(x) - per - - # take 'per * len_rescaled' as initial guess - return root(curve, per * model.len_rescaled)["x"][0] - - -def set_arg_bounds(model, check_args=True, **kwargs): - r"""Set bounds for the parameters of the model. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - check_args : bool, optional - Whether to check if the arguments are in their valid bounds. - In case not, a proper default value will be determined. - Default: True - **kwargs - Parameter name as keyword ("var", "len_scale", "nugget", ) - and a list of 2 or 3 values as value: - - * ``[a, b]`` or - * ``[a, b, ]`` - - is one of ``"oo"``, ``"cc"``, ``"oc"`` or ``"co"`` - to define if the bounds are open ("o") or closed ("c"). - """ - # if variance needs to be resetted, do this at last - var_bnds = [] - for arg, bounds in kwargs.items(): - if not check_bounds(bounds): - raise ValueError( - f"Given bounds for '{arg}' are not valid, got: {bounds}" - ) - if arg in model.opt_arg: - model._opt_arg_bounds[arg] = bounds - elif arg == "var": - var_bnds = bounds - continue - elif arg == "len_scale": - model.len_scale_bounds = bounds - elif arg == "nugget": - model.nugget_bounds = bounds - elif arg == "anis": - model.anis_bounds = bounds - else: - raise ValueError(f"set_arg_bounds: unknown argument '{arg}'") - if check_args and check_arg_in_bounds(model, arg) > 0: - def_arg = default_arg_from_bounds(bounds) - if arg == "anis": - setattr(model, arg, [def_arg] * (model.dim - 1)) - else: - setattr(model, arg, def_arg) - # set var last like always - if var_bnds: - model.var_bounds = var_bnds - if check_args and check_arg_in_bounds(model, "var") > 0: - model.var = default_arg_from_bounds(var_bnds) - - -def check_arg_bounds(model): - """ - Check arguments to be within their given bounds. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - - Raises - ------ - ValueError - When an argument is not in its valid bounds. - """ - # check var, len_scale, nugget and optional-arguments - for arg in model.arg_bounds: - if not model.arg_bounds[arg]: - continue # no bounds given during init (called from self.dim) - bnd = list(model.arg_bounds[arg]) - val = getattr(model, arg) - error_case = check_arg_in_bounds(model, arg) - if error_case == 1: - raise ValueError(f"{arg} needs to be >= {bnd[0]}, got: {val}") - if error_case == 2: - raise ValueError(f"{arg} needs to be > {bnd[0]}, got: {val}") - if error_case == 3: - raise ValueError(f"{arg} needs to be <= {bnd[1]}, got: {val}") - if error_case == 4: - raise ValueError(f"{arg} needs to be < {bnd[1]}, got: {val}") - - -def set_dim(model, dim): - """ - Set the dimension in the given model. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. - dim : :class:`int` - dimension of the model. - - Raises - ------ - ValueError - When dimension is < 1. 
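A hedged sketch of how these routines surface in the public gstools API, assuming CovModel exposes set_arg_bounds, arg_bounds and percentile_scale as in released versions:

    import gstools as gs

    model = gs.Stable(dim=2, var=1.0, len_scale=10.0, alpha=1.5)
    # restrict the optional argument "alpha" to the half-open interval (0, 2]
    model.set_arg_bounds(alpha=[0.0, 2.0, "oc"])
    print(model.arg_bounds["alpha"])      # [0.0, 2.0, 'oc']
    # distance at which 90 % of the variance is reached by the variogram
    print(model.percentile_scale(0.9))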
- """ - # check if a fixed dimension should be used - if model.fix_dim() is not None and model.fix_dim() != dim: - warnings.warn( - f"{model.name}: using fixed dimension {model.fix_dim()}", - AttributeWarning, - ) - dim = model.fix_dim() - if model.latlon and dim != (3 + int(model.temporal)): - raise ValueError( - f"{model.name}: using fixed dimension {model.fix_dim()}, " - f"which is not compatible with a latlon model (with temporal={model.temporal})." - ) - # force dim=3 (or 4 when temporal=True) for latlon models - dim = (3 + int(model.temporal)) if model.latlon else dim - # set the dimension - if dim < 1: - raise ValueError("Only dimensions of d >= 1 are supported.") - if not model.check_dim(dim): - warnings.warn( - f"Dimension {dim} is not appropriate for this model.", - AttributeWarning, - ) - model._dim = int(dim) - # create fourier transform just once (recreate for dim change) - model._sft = SFT(ndim=model.dim, **model.hankel_kw) - # recalculate dimension related parameters - if model._anis is not None: - model._len_scale, model._anis = set_len_anis( - model.dim, model._len_scale, model._anis - ) - if model._angles is not None: - model._angles = set_model_angles( - model.dim, model._angles, model.latlon, model.temporal - ) - model.check_arg_bounds() - - -def compare(this, that): - """ - Compare CovModels. - - Parameters - ---------- - this / that : :any:`CovModel` - The covariance models to compare. - """ - # prevent attribute error in opt_arg if the are not equal - if set(this.opt_arg) != set(that.opt_arg): - return False - # prevent dim error in anis and angles - if this.dim != that.dim: - return False - equal = True - equal &= this.name == that.name - equal &= np.isclose(this.var, that.var) - equal &= np.isclose(this.var_raw, that.var_raw) # ?! needless? - equal &= np.isclose(this.nugget, that.nugget) - equal &= np.isclose(this.len_scale, that.len_scale) - equal &= np.all(np.isclose(this.anis, that.anis)) - equal &= np.all(np.isclose(this.angles, that.angles)) - equal &= np.isclose(this.rescale, that.rescale) - equal &= this.latlon == that.latlon - equal &= this.temporal == that.temporal - for opt in this.opt_arg: - equal &= np.isclose(getattr(this, opt), getattr(that, opt)) - return equal - - -def model_repr(model): # pragma: no cover - """ - Generate the model string representation. - - Parameters - ---------- - model : :any:`CovModel` - The covariance model in use. 
- """ - m = model - p = model._prec - opt_str = "" - t_str = ", temporal=True" if m.temporal else "" - if not np.isclose(m.rescale, m.default_rescale()): - opt_str += f", rescale={m.rescale:.{p}}" - for opt in m.opt_arg: - opt_str += f", {opt}={getattr(m, opt):.{p}}" - if m.latlon: - ani_str = ( - "" - if m.is_isotropic or not m.temporal - else f", anis={m.anis[-1]:.{p}}" - ) - r_str = ( - "" - if np.isclose(m.geo_scale, 1) - else f", geo_scale={m.geo_scale:.{p}}" - ) - repr_str = ( - f"{m.name}(latlon={m.latlon}{t_str}, var={m.var:.{p}}, " - f"len_scale={m.len_scale:.{p}}, nugget={m.nugget:.{p}}" - f"{ani_str}{r_str}{opt_str})" - ) - else: - # only print anis and angles if model is anisotropic or rotated - ani_str = "" if m.is_isotropic else f", anis={list_format(m.anis, p)}" - ang_str = ( - f", angles={list_format(m.angles, p)}" if m.do_rotation else "" - ) - repr_str = ( - f"{m.name}(dim={m.spatial_dim}{t_str}, var={m.var:.{p}}, " - f"len_scale={m.len_scale:.{p}}, nugget={m.nugget:.{p}}" - f"{ani_str}{ang_str}{opt_str})" - ) - return repr_str diff --git a/src/gstools_cython/covmodel/tpl_models.py b/src/gstools_cython/covmodel/tpl_models.py deleted file mode 100644 index b728e7b9..00000000 --- a/src/gstools_cython/covmodel/tpl_models.py +++ /dev/null @@ -1,570 +0,0 @@ -""" -GStools subpackage providing truncated power law covariance models. - -.. currentmodule:: gstools.covmodel.tpl_models - -The following classes and functions are provided - -.. autosummary:: - TPLGaussian - TPLExponential - TPLStable - TPLSimple -""" - -# pylint: disable=C0103, E1101 -import warnings - -import numpy as np - -from gstools.covmodel.base import CovModel -from gstools.covmodel.tools import AttributeWarning -from gstools.tools.special import ( - tpl_exp_spec_dens, - tpl_gau_spec_dens, - tplstable_cor, -) - -__all__ = ["TPLGaussian", "TPLExponential", "TPLStable", "TPLSimple"] - - -class TPLCovModel(CovModel): - """Truncated-Power-Law Covariance Model base class for super-position.""" - - @property - def len_up(self): - """:class:`float`: Upper length scale truncation of the model. - - * ``len_up = len_low + len_scale`` - """ - return self.len_low + self.len_scale - - @property - def len_up_rescaled(self): - """:class:`float`: Upper length scale truncation rescaled. - - * ``len_up_rescaled = (len_low + len_scale) / rescale`` - """ - return self.len_up / self.rescale - - @property - def len_low_rescaled(self): - """:class:`float`: Lower length scale truncation rescaled. - - * ``len_low_rescaled = len_low / rescale`` - """ - return self.len_low / self.rescale - - def var_factor(self): - """Factor for C (intensity of variation) to result in variance.""" - return ( - self.len_up_rescaled ** (2 * self.hurst) - - self.len_low_rescaled ** (2 * self.hurst) - ) / (2 * self.hurst) - - def cor(self, h): - """TPL - normalized correlation function.""" - - def correlation(self, r): - """TPL - correlation function.""" - - -# Truncated power law ######################################################### - - -class TPLGaussian(TPLCovModel): - r"""Truncated-Power-Law with Gaussian modes. - - Notes - ----- - The truncated power law is given by a superposition of scale-dependent - variograms [Federico1997]_: - - .. math:: - \gamma_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}}(r) = - \intop_{\ell_{\mathrm{low}}}^{\ell_{\mathrm{up}}} - \gamma(r,\lambda) \frac{\rm d \lambda}{\lambda} - - with `Gaussian` modes on each scale: - - .. 
math:: - \gamma(r,\lambda) &= - \sigma^2(\lambda)\cdot\left(1- - \exp\left[- \left(\frac{r}{\lambda}\right)^{2}\right] - \right)\\ - \sigma^2(\lambda) &= C\cdot\lambda^{2H} - - This results in: - - .. math:: - \gamma_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}}(r) &= - \sigma^2_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}}\cdot\left(1- - H \cdot - \frac{\ell_{\mathrm{up}}^{2H} \cdot - E_{1+H} - \left[\left(\frac{r}{\ell_{\mathrm{up}}}\right)^{2}\right] - - \ell_{\mathrm{low}}^{2H} \cdot - E_{1+H} - \left[\left(\frac{r}{\ell_{\mathrm{low}}}\right)^{2}\right]} - {\ell_{\mathrm{up}}^{2H}-\ell_{\mathrm{low}}^{2H}} - \right) \\ - \sigma^2_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}} &= - \frac{C\cdot\left(\ell_{\mathrm{up}}^{2H} - -\ell_{\mathrm{low}}^{2H}\right)}{2H} - - The "length scale" of this model is equivalent by the integration range: - - .. math:: - \ell = \ell_{\mathrm{up}} -\ell_{\mathrm{low}} - - If you want to define an upper scale truncation, you should set ``len_low`` - and ``len_scale`` accordingly. - - The following Parameters occur: - - * :math:`C>0` : - scaling factor from the Power-Law (intensity of variation) - This parameter will be calculated internally by the given variance. - You can access C directly by ``model.var_raw`` - * :math:`00` : - scaling factor from the Power-Law (intensity of variation) - This parameter will be calculated internally by the given variance. - You can access C directly by ``model.var_raw`` - * :math:`00` : - scaling factor from the Power-Law (intensity of variation) - This parameter will be calculated internally by the given variance. - You can access C directly by ``model.var_raw`` - * :math:`0 1 and value.size != dim: # vector mean - raise ValueError(f"Mean/Trend: Wrong size ({value})") - return value if value.size > 1 else value.item() - - -class Field: - """A base class for random fields, kriging fields, etc. - - Parameters - ---------- - model : :any:`CovModel`, optional - Covariance Model related to the field. - value_type : :class:`str`, optional - Value type of the field. Either "scalar" or "vector". - The default is "scalar". - mean : :any:`None` or :class:`float` or :any:`callable`, optional - Mean of the field if wanted. Could also be a callable. - The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the field. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - Trend of the denormalized fields. If no normalizer is applied, - this behaves equal to 'mean'. - The default is None. - dim : :any:`None` or :class:`int`, optional - Dimension of the field if no model is given. 
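As a usage sketch for the Field interface documented above, assuming the public gstools API in which SRF is a Field subclass accepting mean, trend and seed:

    import numpy as np
    import gstools as gs

    model = gs.Gaussian(dim=1, var=0.25, len_scale=2.0)
    # mean and trend may be plain floats or callables of the position
    srf = gs.SRF(model, mean=1.0, trend=lambda x: 0.1 * x, seed=20170519)
    x = np.linspace(0.0, 10.0, 101)
    field = srf(x)  # generated field, also stored as srf.field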
- """ - - valid_value_types = ["scalar", "vector"] - """:class:`list` of :class:`str`: valid field value types.""" - - default_field_names = ["field"] - """:class:`list`: Default field names.""" - - def __init__( - self, - model=None, - value_type="scalar", - mean=None, - normalizer=None, - trend=None, - dim=None, - ): - # initialize attributes - self._mesh_type = "unstructured" # default - self._pos = None - self._field_shape = None - self._field_names = [] - self._model = None - self._value_type = None - self._mean = None - self._normalizer = None - self._trend = None - self._dim = dim if dim is None else int(dim) - # set properties - self.model = model - self.value_type = value_type - self.mean = mean - self.normalizer = normalizer - self.trend = trend - - def __len__(self): - return len(self.field_names) - - def __contains__(self, item): - return item in self.field_names - - def __getitem__(self, key): - if key in self.field_names: - return getattr(self, key) - if isinstance(key, int): - return self[self.field_names[key]] - if isinstance(key, slice): - return [self[f] for f in self.field_names[key]] - if isinstance(key, Iterable) and not isinstance(key, str): - return [self[f] for f in key] - raise KeyError(f"{self.name}: requested field '{key}' not present") - - def __delitem__(self, key): - names = [] - if key in self.field_names: - names = [key] - elif isinstance(key, int): - names = [self.field_names[key]] - elif isinstance(key, slice): - names = self.field_names[key] - elif isinstance(key, Iterable) and not isinstance(key, str): - for k in key: - k = self.field_names[k] if isinstance(key, int) else k - names.append(k) - else: - raise KeyError(f"{self.name}: requested field '{key}' not present") - for name in names: - if name not in self.field_names: - raise KeyError( - f"{self.name}: requested field '{name}' not present" - ) - delattr(self, name) - del self._field_names[self._field_names.index(name)] - - def __call__( - self, - pos=None, - field=None, - mesh_type="unstructured", - post_process=True, - store=True, - ): - """Generate the field. - - Parameters - ---------- - pos : :class:`list`, optional - the position tuple, containing main direction and transversal - directions - field : :class:`numpy.ndarray` or :any:`None`, optional - the field values. Will be all zeros if :any:`None` is given. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured'. Default: 'unstructured' - post_process : :class:`bool`, optional - Whether to apply mean, normalizer and trend to the field. - Default: `True` - store : :class:`str` or :class:`bool`, optional - Whether to store field (True/False) with default name - or with specified name. - The default is :any:`True` for default name "field". - - Returns - ------- - field : :class:`numpy.ndarray` - the field values. - """ - name, save = self.get_store_config(store) - pos, shape = self.pre_pos(pos, mesh_type) - if field is None: - field = np.zeros(shape, dtype=np.double) - else: - field = np.asarray(field, dtype=np.double).reshape(shape) - return self.post_field(field, name, post_process, save) - - def structured(self, *args, **kwargs): - """Generate a field on a structured mesh. 
- - See :any:`__call__` - """ - if self.pos is None: - self.mesh_type = "structured" - if not (args or "pos" in kwargs) and self.mesh_type == "unstructured": - raise ValueError("Field.structured: can't reuse present 'pos'") - call = partial(self.__call__, mesh_type="structured") - return call(*args, **kwargs) - - def unstructured(self, *args, **kwargs): - """Generate a field on an unstructured mesh. - - See :any:`__call__` - """ - if self.pos is None: - self.mesh_type = "unstructured" - if not (args or "pos" in kwargs) and self.mesh_type != "unstructured": - raise ValueError("Field.unstructured: can't reuse present 'pos'") - call = partial(self.__call__, mesh_type="unstructured") - return call(*args, **kwargs) - - def mesh( - self, mesh, points="centroids", direction="all", name="field", **kwargs - ): - """Generate a field on a given meshio, ogs5py or PyVista mesh. - - Parameters - ---------- - mesh : meshio.Mesh or ogs5py.MSH or PyVista mesh - The given mesh - points : :class:`str`, optional - The points to evaluate the field at. - Either the "centroids" of the mesh cells - (calculated as mean of the cell vertices) or the "points" - of the given mesh. - Default: "centroids" - direction : :class:`str` or :class:`list`, optional - Here you can state which direction should be chosen for - lower dimension. For example, if you got a 2D mesh in xz direction, - you have to pass "xz". By default, all directions are used. - One can also pass a list of indices. - Default: "all" - name : :class:`str` or :class:`list` of :class:`str`, optional - Name(s) to store the field(s) in the given mesh as point_data or - cell_data. If to few names are given, digits will be appended. - Default: "field" - **kwargs - Keyword arguments forwarded to :any:`__call__`. - - Notes - ----- - This will store the field in the given mesh under the given name, - if a meshio or PyVista mesh was given. - - See: - - meshio: https://github.com/nschloe/meshio - - ogs5py: https://github.com/GeoStat-Framework/ogs5py - - PyVista: https://github.com/pyvista/pyvista - """ - return generate_on_mesh(self, mesh, points, direction, name, **kwargs) - - def pre_pos(self, pos=None, mesh_type="unstructured", info=False): - """ - Preprocessing positions and mesh_type. - - Parameters - ---------- - pos : :any:`iterable` - the position tuple, containing main direction and transversal - directions - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: `"unstructured"` - info : :class:`bool`, optional - Whether to return information - - Returns - ------- - iso_pos : (d, n), :class:`numpy.ndarray` - Isometrized position tuple. - shape : :class:`tuple` - Shape of the resulting field. - info : :class:`dict`, optional - Information about settings. - - Warnings - -------- - When setting a new position tuple that differs from the present one, - all stored fields will be deleted. - """ - info_ret = {"deleted": False} - if pos is None: - if self.pos is None: - raise ValueError("Field: no position tuple 'pos' present") - else: - info_ret = self.set_pos(pos, mesh_type, info=True) - if self.mesh_type != "unstructured": - pos = generate_grid(self.pos) - else: - pos = self.pos - # return isometrized pos tuple, field shape and possible info - info_ret = (info_ret,) - if self.model is None: - return (pos, self.field_shape) + info * info_ret - return (self.model.isometrize(pos), self.field_shape) + info * info_ret - - def post_field(self, field, name="field", process=True, save=True): - """ - Postprocessing field values. 
- - Parameters - ---------- - field : :class:`numpy.ndarray` - Field values. - name : :class:`str`, optional - Name. to store the field. - The default is "field". - process : :class:`bool`, optional - Whether to process field to apply mean, normalizer and trend. - The default is True. - save : :class:`bool`, optional - Whether to store the field under the given name. - The default is True. - - Returns - ------- - field : :class:`numpy.ndarray` - Processed field values. - """ - if self.field_shape is None: - raise ValueError("post_field: no 'field_shape' present.") - field = np.asarray(field, dtype=np.double).reshape(self.field_shape) - if process: - field = apply_mean_norm_trend( - pos=self.pos, - field=field, - mesh_type=self.mesh_type, - value_type=self.value_type, - mean=self.mean, - normalizer=self.normalizer, - trend=self.trend, - check_shape=False, - stacked=False, - ) - if save: - name = str(name) - if not name.isidentifier() or ( - name not in self.field_names and name in dir(self) - ): - raise ValueError( - f"Field: given field name '{name}' is not valid" - ) - # allow resetting present fields - if name not in self._field_names: - self._field_names.append(name) - setattr(self, name, field) - return field - - def delete_fields(self, select=None): - """Delete selected fields.""" - del self[self.field_names if select is None else select] - - def transform( - self, method, field="field", store=True, process=False, **kwargs - ): - """ - Apply field transformation. - - Parameters - ---------- - method : :class:`str` - Method to use. - See :py:mod:`gstools.transform` for available transformations. - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - **kwargs - Keyword arguments forwarded to selected method. - - Raises - ------ - ValueError - When method is unknown. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - return apply( - self, method, field=field, store=store, process=process, **kwargs - ) - - def to_pyvista( - self, field_select="field", fieldname="field" - ): # pragma: no cover - """Create a VTK/PyVista grid of the stored field. - - Parameters - ---------- - field_select : :class:`str`, optional - Field that should be stored. Can be: - "field", "raw_field", "krige_field", "err_field" or "krige_var". - Default: "field" - fieldname : :class:`str`, optional - Name of the field in the VTK file. Default: "field" - """ - grid = to_vtk_helper( - self, filename=None, field_select=field_select, fieldname=fieldname - ) - return grid - - def vtk_export( - self, filename, field_select="field", fieldname="field" - ): # pragma: no cover - """Export the stored field to vtk. - - Parameters - ---------- - filename : :class:`str` - Filename of the file to be saved, including the path. Note that an - ending (.vtr or .vtu) will be added to the name. - field_select : :class:`str`, optional - Field that should be stored. Can be: - "field", "raw_field", "krige_field", "err_field" or "krige_var". - Default: "field" - fieldname : :class:`str`, optional - Name of the field in the VTK file. 
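A small sketch of the transform entry point described above, assuming the released gstools API and the "zinnharvey" transformation from gstools.transform; this is an example, not part of the patch.

    import numpy as np
    import gstools as gs

    model = gs.Gaussian(dim=2, var=1.0, len_scale=10.0)
    srf = gs.SRF(model, seed=20170519)
    srf.structured([np.arange(100), np.arange(100)])
    # apply a transformation by name to the stored field
    srf.transform("zinnharvey", conn="high")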
Default: "field" - """ - if not isinstance(filename, str): - raise TypeError("Please use a string filename.") - return to_vtk_helper( - self, - filename=filename, - field_select=field_select, - fieldname=fieldname, - ) - - def plot( - self, field="field", fig=None, ax=None, **kwargs - ): # pragma: no cover - """ - Plot the spatial random field. - - Parameters - ---------- - field : :class:`str`, optional - Field that should be plotted. - Default: "field" - fig : :class:`Figure` or :any:`None` - Figure to plot the axes on. If `None`, a new one will be created. - Default: `None` - ax : :class:`Axes` or :any:`None` - Axes to plot on. If `None`, a new one will be added to the figure. - Default: `None` - **kwargs - Forwarded to the plotting routine. - """ - # just import if needed; matplotlib is not required by setup - from gstools.field.plot import plot_field, plot_vec_field - - if self.value_type is None: - raise ValueError( - "Field value type not set! " - "Specify 'scalar' or 'vector' before plotting." - ) - - if self.value_type == "scalar": - r = plot_field(self, field, fig, ax, **kwargs) - elif self.value_type == "vector": - if self.dim == 2: - r = plot_vec_field(self, field, fig, ax, **kwargs) - else: - raise NotImplementedError( - "Streamflow plotting only supported for 2d case." - ) - else: - raise ValueError(f"Unknown field value type: {self.value_type}") - - return r - - def set_pos(self, pos, mesh_type="unstructured", info=False): - """ - Set positions and mesh_type. - - Parameters - ---------- - pos : :any:`iterable` - the position tuple, containing main direction and transversal - directions - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: `"unstructured"` - info : :class:`bool`, optional - Whether to return information - - Returns - ------- - info : :class:`dict`, optional - Information about settings. - - Warnings - -------- - When setting a new position tuple that differs from the present one, - all stored fields will be deleted. - """ - info_ret = {"deleted": False} - old_type = copy(self.mesh_type) - old_pos = copy(self.pos) - # save pos and mesh-type - self.mesh_type = mesh_type - self.pos = pos - # remove present fields if new pos is different from current - if old_type != self.mesh_type or not _pos_equal(old_pos, self.pos): - self.delete_fields() - info_ret["deleted"] = True - del old_pos - return info_ret if info else None - - def get_store_config(self, store, default=None, fld_cnt=None): - """ - Get storage configuration from given selection. - - Parameters - ---------- - store : :class:`str` or :class:`bool` or :class:`list`, optional - Whether to store fields (True/False) with default names - or with specified names. - The default is :any:`True` for default names. - default : :class:`str` or :class:`list`, optional - Default field names. The default is "field". - fld_cnt : :any:`None` or :class:`int`, optional - Number of fields when using lists. The default is None. - - Returns - ------- - name : :class:`str` or :class:`list` - Name(s) of field. - save : :class:`bool` or :class:`list` - Whether to save field(s). 
- """ - if default is None: - if fld_cnt is None: - default = self.default_field_names[0] - else: - default = self.default_field_names - # single field - if fld_cnt is None: - save = isinstance(store, str) or bool(store) - name = store if isinstance(store, str) else default - return name, save - # multiple fields - default = _names(default, fld_cnt) - save = [True] * fld_cnt - if isinstance(store, str): - store = [store] - if isinstance(store, Iterable): - store = list(store)[:fld_cnt] - store += [True] * (fld_cnt - len(store)) - name = [None] * fld_cnt - for i, val in enumerate(store): - save[i] = isinstance(val, str) or bool(val) - name[i] = val if isinstance(val, str) else default[i] - else: - save = [bool(store)] * fld_cnt - name = copy(default) - return name, save - - @property - def pos(self): - """:class:`tuple`: The position tuple of the field.""" - return self._pos - - @pos.setter - def pos(self, pos): - if self.mesh_type == "unstructured": - self._pos = np.asarray(pos, dtype=np.double).reshape(self.dim, -1) - self._field_shape = np.shape(self._pos[0]) - else: - self._pos, self._field_shape = format_struct_pos_dim(pos, self.dim) - # prepend dimension if we have a vector field - if self.value_type == "vector": - self._field_shape = (self.dim,) + self._field_shape - if self.latlon: - raise ValueError("Field: Vector fields not allowed for latlon") - - @property - def all_fields(self): - """:class:`list`: All fields as stacked list.""" - return self[self.field_names] - - @property - def field_names(self): - """:class:`list`: Names of present fields.""" - return self._field_names - - @field_names.deleter - def field_names(self): - self.delete_fields() - - @property - def field_shape(self): - """:class:`tuple`: The shape of the field.""" - return self._field_shape - - @property - def mesh_type(self): - """:class:`str`: The mesh type of the field.""" - return self._mesh_type - - @mesh_type.setter - def mesh_type(self, mesh_type): - self._mesh_type = str(mesh_type) - - @property - def model(self): - """:any:`CovModel`: The covariance model of the field.""" - return self._model - - @model.setter - def model(self, model): - if model is not None: - if not isinstance(model, CovModel): - raise ValueError( - "Field: 'model' is not an instance of 'gstools.CovModel'" - ) - self._model = model - self._dim = None - elif self._dim is None: - raise ValueError("Field: either needs 'model' or 'dim'.") - else: - self._model = None - - @property - def mean(self): - """:class:`float` or :any:`callable`: The mean of the field.""" - return self._mean - - @mean.setter - def mean(self, mean): - self._mean = _set_mean_trend(mean, self.dim) - - @property - def normalizer(self): - """:any:`Normalizer`: Normalizer of the field.""" - return self._normalizer - - @normalizer.setter - def normalizer(self, normalizer): - self._normalizer = _check_normalizer(normalizer) - - @property - def trend(self): - """:class:`float` or :any:`callable`: The trend of the field.""" - return self._trend - - @trend.setter - def trend(self, trend): - self._trend = _set_mean_trend(trend, self.dim) - - @property - def value_type(self): - """:class:`str`: Type of the field values (scalar, vector).""" - return self._value_type - - @value_type.setter - def value_type(self, value_type): - if value_type not in self.valid_value_types: - raise ValueError( - f"Field: value type not in {self.valid_value_types}" - ) - self._value_type = value_type - - @property - def dim(self): - """:class:`int`: Dimension of the field.""" - return self._dim if 
self.model is None else self.model.field_dim - - @property - def latlon(self): - """:class:`bool`: Whether the field depends on geographical coords.""" - return False if self.model is None else self.model.latlon - - @property - def temporal(self): - """:class:`bool`: Whether the field depends on time.""" - return False if self.model is None else self.model.temporal - - @property - def name(self): - """:class:`str`: The name of the class.""" - return self.__class__.__name__ - - def _fmt_mean_norm_trend(self): - # fmt_mean_norm_trend for all child classes - return fmt_mean_norm_trend(self) - - def __repr__(self): - """Return String representation.""" - if self.model is None: - dim_str = f"dim={self.dim}" - else: - dim_str = f"model={self.model.name}" - return ( - f"{self.name}({dim_str}, " - f"value_type='{self.value_type}'{self._fmt_mean_norm_trend()})" - ) diff --git a/src/gstools_cython/field/cond_srf.py b/src/gstools_cython/field/cond_srf.py deleted file mode 100644 index c3e03fe2..00000000 --- a/src/gstools_cython/field/cond_srf.py +++ /dev/null @@ -1,313 +0,0 @@ -""" -GStools subpackage providing a class for conditioned spatial random fields. - -.. currentmodule:: gstools.field.cond_srf - -The following classes are provided - -.. autosummary:: - CondSRF -""" - -# pylint: disable=C0103, W0231, W0221, W0222, E1102 - -import numpy as np - -from gstools.field.base import Field -from gstools.field.generator import Generator, RandMeth -from gstools.krige import Krige - -__all__ = ["CondSRF"] - -GENERATOR = { - "RandMeth": RandMeth, -} -"""dict: Standard generators for conditioned spatial random fields.""" - - -class CondSRF(Field): - """A class to generate conditioned spatial random fields (SRF). - - Parameters - ---------- - krige : :any:`Krige` - Kriging setup to condition the spatial random field. - generator : :class:`str` or :any:`Generator`, optional - Name or class of the field generator to be used. - At the moment, only the following generator is provided: - - * "RandMeth" : The Randomization Method. - See: :any:`RandMeth` - - Default: "RandMeth" - **generator_kwargs - Keyword arguments that are forwarded to the generator in use. - Have a look at the provided generators for further information. - """ - - valid_value_types = ["scalar"] - """:class:`list` of :class:`str`: valid field value types.""" - - default_field_names = ["field", "raw_field", "raw_krige"] - """:class:`list`: Default field names.""" - - def __init__(self, krige, generator="RandMeth", **generator_kwargs): - if not isinstance(krige, Krige): - raise ValueError("CondSRF: krige should be an instance of Krige.") - self._krige = krige - # initialize attributes - self._field_names = [] - # initialize private attributes - self._generator = None - # initialize attributes - self.set_generator(generator, **generator_kwargs) - - def __call__( - self, - pos=None, - seed=np.nan, - mesh_type="unstructured", - post_process=True, - store=True, - krige_store=True, - **kwargs, - ): - """Generate the conditioned spatial random field. - - The field is saved as `self.field` and is also returned. - - Parameters - ---------- - pos : :class:`list`, optional - the position tuple, containing main direction and transversal - directions - seed : :class:`int`, optional - seed for RNG for resetting. Default: keep seed from generator - mesh_type : :class:`str` - 'structured' / 'unstructured' - post_process : :class:`bool`, optional - Whether to apply mean, normalizer and trend to the field. 
- Default: `True` - store : :class:`str` or :class:`bool` or :class:`list`, optional - Whether to store fields (True/False) with default names - or with specified names. - The default is :any:`True` for default names - ["field", "raw_field", "raw_krige"]. - krige_store : :class:`str` or :class:`bool` or :class:`list`, optional - Whether to store kriging fields (True/False) with default name - or with specified names. - The default is :any:`True` for default names - ["field", "krige_var"]. - **kwargs - keyword arguments that are forwarded to the kriging routine in use. - - Returns - ------- - field : :class:`numpy.ndarray` - the conditioned SRF - """ - name, save = self.get_store_config(store=store, fld_cnt=3) - krige_name, krige_save = self.krige.get_store_config( - store=krige_store, fld_cnt=2 - ) - kwargs["mesh_type"] = mesh_type - kwargs["only_mean"] = False # overwrite if given - kwargs["return_var"] = True # overwrite if given - kwargs["post_process"] = False # overwrite if given - kwargs["store"] = [False, krige_name[1] if krige_save[1] else False] - # update the model/seed in the generator if any changes were made - self.generator.update(self.model, seed) - # get isometrized positions and the resulting field-shape - iso_pos, shape, info = self.pre_pos(pos, mesh_type, info=True) - # generate the field - rawfield = np.reshape(self.generator(iso_pos, add_nugget=False), shape) - # call krige on already set pos (reuse already calculated fields) - if ( - not info["deleted"] - and name[2] in self.field_names - and krige_name[1] in self.krige.field_names - ): - reuse = True - rawkrige, krige_var = self[name[2]], self.krige[krige_name[1]] - else: - reuse = False - rawkrige, krige_var = self.krige(**kwargs) - var_scale, nugget = self.get_scaling(krige_var, shape) - # store krige field (need a copy to not alter field by reference) - if not reuse or krige_name[0] not in self.krige.field_names: - self.krige.post_field( - rawkrige.copy(), krige_name[0], post_process, krige_save[0] - ) - # store raw krige field - if not reuse: - self.post_field(rawkrige, name[2], False, save[2]) - # store raw random field - self.post_field(rawfield, name[1], False, save[1]) - # store cond random field - return self.post_field( - field=rawkrige + var_scale * rawfield + nugget, - name=name[0], - process=post_process, - save=save[0], - ) - - def get_scaling(self, krige_var, shape): - """ - Get scaling coefficients for the random field. - - Parameters - ---------- - krige_var : :class:`numpy.ndarray` - Kriging variance. - shape : :class:`tuple` of :class:`int` - Field shape. - - Returns - ------- - var_scale : :class:`numpy.ndarray` - Variance scaling factor for the random field. - nugget : :class:`numpy.ndarray` or :class:`int` - Nugget to be added to the field. - """ - if self.model.nugget > 0: - var_scale = np.maximum(krige_var - self.model.nugget, 0) - nug_scale = np.sqrt((krige_var - var_scale) / self.model.nugget) - var_scale = np.sqrt(var_scale / self.model.var) - nugget = nug_scale * self.generator.get_nugget(shape) - else: - var_scale = np.sqrt(krige_var / self.model.var) - nugget = 0 - return var_scale, nugget - - def set_generator(self, generator, **generator_kwargs): - """Set the generator for the field. - - Parameters - ---------- - generator : :class:`str` or :any:`Generator`, optional - Name or class of the generator to use for field generation. - Default: "RandMeth" - **generator_kwargs - keyword arguments that are forwarded to the generator in use. 
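For reference, conditioning a random field with this class could look like the following sketch, which assumes the public gstools API (gs.krige.Ordinary and gs.CondSRF) as documented:

    import numpy as np
    import gstools as gs

    cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7]
    cond_val = [0.47, 0.56, 0.74, 1.47, 1.74]
    model = gs.Gaussian(dim=1, var=0.5, len_scale=2.0)
    krige = gs.krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val)
    cond_srf = gs.CondSRF(krige)
    x = np.linspace(0.0, 15.0, 151)
    field = cond_srf(x, seed=20170519)  # field honoring the conditioning data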
- """ - gen = GENERATOR[generator] if generator in GENERATOR else generator - if not (isinstance(gen, type) and issubclass(gen, Generator)): - raise ValueError( - f"gstools.CondSRF: Unknown or wrong generator: {generator}" - ) - self._generator = gen(self.model, **generator_kwargs) - self.value_type = self.generator.value_type - - def set_pos(self, pos, mesh_type="unstructured", info=False): - """ - Set positions and mesh_type. - - Parameters - ---------- - pos : :any:`iterable` - the position tuple, containing main direction and transversal - directions - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: `"unstructured"` - info : :class:`bool`, optional - Whether to return information - - Returns - ------- - info : :class:`dict`, optional - Information about settings. - - Warnings - -------- - When setting a new position tuple that differs from the present one, - all stored fields will be deleted. - """ - info_ret = super().set_pos(pos, mesh_type, info=True) - if info_ret["deleted"]: - self.krige.delete_fields() - return info_ret if info else None - - @property - def pos(self): - """:class:`tuple`: The position tuple of the field.""" - return self.krige.pos - - @pos.setter - def pos(self, pos): - self.krige.pos = pos - - @property - def field_shape(self): - """:class:`tuple`: The shape of the field.""" - return self.krige.field_shape - - @property - def mesh_type(self): - """:class:`str`: The mesh type of the field.""" - return self.krige.mesh_type - - @mesh_type.setter - def mesh_type(self, mesh_type): - self.krige.mesh_type = mesh_type - - @property - def krige(self): - """:any:`Krige`: The underlying kriging class.""" - return self._krige - - @property - def generator(self): - """:any:`callable`: The generator of the field.""" - return self._generator - - @property - def model(self): - """:any:`CovModel`: The covariance model of the field.""" - return self.krige.model - - @model.setter - def model(self, model): - self.krige.model = model - - @property - def mean(self): - """:class:`float` or :any:`callable`: The mean of the field.""" - return self.krige.mean - - @mean.setter - def mean(self, mean): - self.krige.mean = mean - - @property - def normalizer(self): - """:any:`Normalizer`: Normalizer of the field.""" - return self.krige.normalizer - - @normalizer.setter - def normalizer(self, normalizer): - self.krige.normalizer = normalizer - - @property - def trend(self): - """:class:`float` or :any:`callable`: The trend of the field.""" - return self.krige.trend - - @trend.setter - def trend(self, trend): - self.krige.trend = trend - - @property - def value_type(self): - """:class:`str`: Type of the field values (scalar, vector).""" - return self.krige.value_type - - @value_type.setter - def value_type(self, value_type): - self.krige.value_type = value_type - - def __repr__(self): - """Return String representation.""" - return ( - f"{self.name}(krige={self.krige}, generator={self.generator.name})" - ) diff --git a/src/gstools_cython/field/generator.py b/src/gstools_cython/field/generator.py deleted file mode 100644 index 5beab10d..00000000 --- a/src/gstools_cython/field/generator.py +++ /dev/null @@ -1,534 +0,0 @@ -""" -GStools subpackage providing generators for spatial random fields. - -.. currentmodule:: gstools.field.generator - -The following classes are provided - -.. 
autosummary:: - :toctree: - - Generator - RandMeth - IncomprRandMeth -""" - -# pylint: disable=C0103, W0222, C0412, W0231 -import warnings -from abc import ABC, abstractmethod -from copy import deepcopy as dcp - -import numpy as np - -from gstools import config -from gstools.covmodel.base import CovModel -from gstools.random.rng import RNG - -if config.USE_RUST: # pragma: no cover - # pylint: disable=E0401 - from gstools_core import summate, summate_incompr -else: - from gstools.field.summator import summate, summate_incompr - -__all__ = ["Generator", "RandMeth", "IncomprRandMeth"] - - -SAMPLING = ["auto", "inversion", "mcmc"] - - -class Generator(ABC): - """ - Abstract generator class. - - Parameters - ---------- - model : :any:`CovModel` - Covariance model - **kwargs - Placeholder for keyword-args - """ - - @abstractmethod - def __init__(self, model, **kwargs): - pass - - @abstractmethod - def update(self, model=None, seed=np.nan): - """Update the model and the seed. - - If model and seed are not different, nothing will be done. - - Parameters - ---------- - model : :any:`CovModel` or :any:`None`, optional - covariance model. Default: :any:`None` - seed : :class:`int` or :any:`None` or :any:`numpy.nan`, optional - the seed of the random number generator. - If :any:`None`, a random seed is used. If :any:`numpy.nan`, - the actual seed will be kept. Default: :any:`numpy.nan` - """ - - @abstractmethod - def get_nugget(self, shape): - """ - Generate normal distributed values for the nugget simulation. - - Parameters - ---------- - shape : :class:`tuple` - the shape of the summed modes - - Returns - ------- - nugget : :class:`numpy.ndarray` - the nugget in the same shape as the summed modes - """ - - @abstractmethod - def __call__(self, pos, add_nugget=True): - """ - Generate the field. - - Parameters - ---------- - pos : (d, n), :class:`numpy.ndarray` - the position tuple with d dimensions and n points. - add_nugget : :class:`bool` - Whether to add nugget noise to the field. - - Returns - ------- - :class:`numpy.ndarray` - the random modes - """ - - @property - @abstractmethod - def value_type(self): - """:class:`str`: Type of the field values (scalar, vector).""" - - @property - def name(self): - """:class:`str`: Name of the generator.""" - return self.__class__.__name__ - - -class RandMeth(Generator): - r"""Randomization method for calculating isotropic random fields. - - Parameters - ---------- - model : :any:`CovModel` - Covariance model - mode_no : :class:`int`, optional - Number of Fourier modes. Default: ``1000`` - seed : :class:`int` or :any:`None`, optional - The seed of the random number generator. - If "None", a random seed is used. Default: :any:`None` - sampling : :class:`str`, optional - Sampling strategy. Either - - * "auto": select best strategy depending on given model - * "inversion": use inversion method - * "mcmc": use mcmc sampling - - **kwargs - Placeholder for keyword-args - - Notes - ----- - The Randomization method is used to generate isotropic - spatial random fields characterized by a given covariance model. - The calculation looks like [Hesse2014]_: - - .. 
math:: - u\left(x\right)= - \sqrt{\frac{\sigma^{2}}{N}}\cdot - \sum_{i=1}^{N}\left( - Z_{1,i}\cdot\cos\left(\left\langle k_{i},x\right\rangle \right)+ - Z_{2,i}\cdot\sin\left(\left\langle k_{i},x\right\rangle \right) - \right) - - where: - - * :math:`N` : fourier mode number - * :math:`Z_{j,i}` : random samples from a normal distribution - * :math:`k_i` : samples from the spectral density distribution of - the covariance model - - References - ---------- - .. [Hesse2014] Heße, F., Prykhodko, V., Schlüter, S., and Attinger, S., - "Generating random fields with a truncated power-law variogram: - A comparison of several numerical methods", - Environmental Modelling & Software, 55, 32-48., (2014) - """ - - def __init__( - self, - model, - *, - mode_no=1000, - seed=None, - sampling="auto", - **kwargs, - ): - if kwargs: - warnings.warn("gstools.RandMeth: **kwargs are ignored") - # initialize attributes - self._mode_no = int(mode_no) - # initialize private attributes - self._model = None - self._seed = None - self._rng = None - self._z_1 = None - self._z_2 = None - self._cov_sample = None - self._value_type = "scalar" - # set sampling strategy - self._sampling = None - self.sampling = sampling - # set model and seed - self.update(model, seed) - - def __call__(self, pos, add_nugget=True): - """Calculate the random modes for the randomization method. - - This method calls the `summate_*` Cython methods, which are the - heart of the randomization method. - - Parameters - ---------- - pos : (d, n), :class:`numpy.ndarray` - the position tuple with d dimensions and n points. - add_nugget : :class:`bool` - Whether to add nugget noise to the field. - - Returns - ------- - :class:`numpy.ndarray` - the random modes - """ - pos = np.asarray(pos, dtype=np.double) - summed_modes = summate( - self._cov_sample, self._z_1, self._z_2, pos, config.NUM_THREADS - ) - nugget = self.get_nugget(summed_modes.shape) if add_nugget else 0.0 - return np.sqrt(self.model.var / self._mode_no) * summed_modes + nugget - - def get_nugget(self, shape): - """ - Generate normal distributed values for the nugget simulation. - - Parameters - ---------- - shape : :class:`tuple` - the shape of the summed modes - - Returns - ------- - nugget : :class:`numpy.ndarray` - the nugget in the same shape as the summed modes - """ - if self.model.nugget > 0: - nugget = np.sqrt(self.model.nugget) * self._rng.random.normal( - size=shape - ) - else: - nugget = 0.0 - return nugget - - def update(self, model=None, seed=np.nan): - """Update the model and the seed. - - If model and seed are not different, nothing will be done. - - Parameters - ---------- - model : :any:`CovModel` or :any:`None`, optional - covariance model. Default: :any:`None` - seed : :class:`int` or :any:`None` or :any:`numpy.nan`, optional - the seed of the random number generator. - If :any:`None`, a random seed is used. If :any:`numpy.nan`, - the actual seed will be kept. 
Default: :any:`numpy.nan` - """ - # check if a new model is given - if isinstance(model, CovModel): - if self.model != model: - self._model = dcp(model) - if seed is None or not np.isnan(seed): - self.reset_seed(seed) - else: - self.reset_seed(self._seed) - # just update the seed, if its a new one - elif seed is None or not np.isnan(seed): - self.seed = seed - # or just update the seed, when no model is given - elif model is None and (seed is None or not np.isnan(seed)): - if isinstance(self._model, CovModel): - self.seed = seed - else: - raise ValueError( - "gstools.field.generator.RandMeth: no 'model' given" - ) - # if the user tries to trick us, we beat him! - elif model is None and np.isnan(seed): - if not ( - isinstance(self._model, CovModel) - and self._z_1 is not None - and self._z_2 is not None - and self._cov_sample is not None - ): - raise ValueError( - "gstools.field.generator.RandMeth: " - "neither 'model' nor 'seed' given!" - ) - # wrong model type - else: - raise ValueError( - "gstools.field.generator.RandMeth: 'model' is not an " - "instance of 'gstools.CovModel'" - ) - - def reset_seed(self, seed=np.nan): - """ - Recalculate the random amplitudes and wave numbers with the given seed. - - Parameters - ---------- - seed : :class:`int` or :any:`None` or :any:`numpy.nan`, optional - the seed of the random number generator. - If :any:`None`, a random seed is used. If :any:`numpy.nan`, - the actual seed will be kept. Default: :any:`numpy.nan` - - Notes - ----- - Even if the given seed is the present one, modes will be recalculated. - """ - if seed is None or not np.isnan(seed): - self._seed = seed - self._rng = RNG(self._seed) - # normal distributed samples for randmeth - self._z_1 = self._rng.random.normal(size=self._mode_no) - self._z_2 = self._rng.random.normal(size=self._mode_no) - # sample uniform on a sphere - sphere_coord = self._rng.sample_sphere(self.model.dim, self._mode_no) - # sample radii according to radial spectral density of the model - if self.sampling == "inversion" or ( - self.sampling == "auto" and self.model.has_ppf - ): - pdf, cdf, ppf = self.model.dist_func - rad = self._rng.sample_dist( - size=self._mode_no, pdf=pdf, cdf=cdf, ppf=ppf, a=0 - ) - else: - rad = self._rng.sample_ln_pdf( - ln_pdf=self.model.ln_spectral_rad_pdf, - size=self._mode_no, - sample_around=1.0 / self.model.len_rescaled, - ) - # get fully spatial samples by multiplying sphere samples and radii - self._cov_sample = rad * sphere_coord - - @property - def sampling(self): - """:class:`str`: Sampling strategy.""" - return self._sampling - - @sampling.setter - def sampling(self, sampling): - if sampling not in ["auto", "inversion", "mcmc"]: - raise ValueError(f"RandMeth: sampling not in {SAMPLING}.") - self._sampling = sampling - - @property - def seed(self): - """:class:`int`: Seed of the master RNG. - - Notes - ----- - If a new seed is given, the setter property not only saves the - new seed, but also creates new random modes with the new seed. 
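Used standalone, the generator can be driven directly; the following sketch assumes the import path of this module and passes the keyword-only arguments by name:

    import numpy as np
    import gstools as gs
    from gstools.field.generator import RandMeth

    model = gs.Gaussian(dim=2, var=1.0, len_scale=10.0)
    gen = RandMeth(model, mode_no=1000, seed=19970221)
    pos = np.array([[0.0, 1.0, 2.0], [0.0, 0.0, 0.0]])  # (d, n) position tuple
    print(gen(pos))        # summed Fourier modes evaluated at the three points
    gen.seed = 20011012    # assigning a new seed recomputes the random modes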
- """ - return self._seed - - @seed.setter - def seed(self, new_seed): - if new_seed is not self._seed: - self.reset_seed(new_seed) - - @property - def model(self): - """:any:`CovModel`: Covariance model of the spatial random field.""" - return self._model - - @model.setter - def model(self, model): - self.update(model) - - @property - def mode_no(self): - """:class:`int`: Number of modes in the randomization method.""" - return self._mode_no - - @mode_no.setter - def mode_no(self, mode_no): - if int(mode_no) != self._mode_no: - self._mode_no = int(mode_no) - self.reset_seed(self._seed) - - @property - def value_type(self): - """:class:`str`: Type of the field values (scalar, vector).""" - return self._value_type - - def __repr__(self): - """Return String representation.""" - return ( - f"{self.name}(model={self.model}, " - f"mode_no={self._mode_no}, seed={self.seed})" - ) - - -class IncomprRandMeth(RandMeth): - r"""RandMeth for incompressible random vector fields. - - Parameters - ---------- - model : :any:`CovModel` - covariance model - mean_velocity : :class:`float`, optional - the mean velocity in x-direction - mode_no : :class:`int`, optional - number of Fourier modes. Default: ``1000`` - seed : :class:`int` or :any:`None`, optional - the seed of the random number generator. - If "None", a random seed is used. Default: :any:`None` - sampling : :class:`str`, optional - Sampling strategy. Either - - * "auto": select best strategy depending on given model - * "inversion": use inversion method - * "mcmc": use mcmc sampling - - **kwargs - Placeholder for keyword-args - - Notes - ----- - The Randomization method is used to generate isotropic - spatial incompressible random vector fields characterized - by a given covariance model. The equation is [Kraichnan1970]_: - - .. math:: - u_i\left(x\right)= \bar{u_i} \delta_{i1} + - \bar{u_i}\sqrt{\frac{\sigma^{2}}{N}}\cdot - \sum_{j=1}^{N}p_i(k_{j})\left( - Z_{1,j}\cdot\cos\left(\left\langle k_{j},x\right\rangle \right)+ - Z_{2,j}\cdot\sin\left(\left\langle k_{j},x\right\rangle \right) - \right) - - where: - - * :math:`\bar u` : mean velocity in :math:`e_1` direction - * :math:`N` : fourier mode number - * :math:`Z_{k,j}` : random samples from a normal distribution - * :math:`k_j` : samples from the spectral density distribution of - the covariance model - * :math:`p_i(k_j) = e_1 - \frac{k_i k_1}{k^2}` : the projector - ensuring the incompressibility - - References - ---------- - .. [Kraichnan1970] Kraichnan, R. H., - "Diffusion by a random velocity field.", - The physics of fluids, 13(1), 22-31., (1970) - """ - - def __init__( - self, - model, - *, - mean_velocity=1.0, - mode_no=1000, - seed=None, - sampling="auto", - **kwargs, - ): - if model.dim < 2 or model.dim > 3: - raise ValueError( - "Only 2D and 3D incompressible fields can be generated." - ) - super().__init__( - model=model, - mode_no=mode_no, - seed=seed, - sampling=sampling, - **kwargs, - ) - - self.mean_u = mean_velocity - self._value_type = "vector" - - def __call__(self, pos, add_nugget=True): - """Calculate the random modes for the randomization method. - - This method calls the `summate_incompr_*` Cython methods, - which are the heart of the randomization method. - In this class the method contains a projector to - ensure the incompressibility of the vector field. - - Parameters - ---------- - pos : (d, n), :class:`numpy.ndarray` - the position tuple with d dimensions and n points. - add_nugget : :class:`bool` - Whether to add nugget noise to the field. 
- - Returns - ------- - :class:`numpy.ndarray` - the random modes - """ - pos = np.asarray(pos, dtype=np.double) - summed_modes = summate_incompr( - self._cov_sample, - self._z_1, - self._z_2, - pos, - config.NUM_THREADS, - ) - nugget = self.get_nugget(summed_modes.shape) if add_nugget else 0.0 - e1 = self._create_unit_vector(summed_modes.shape) - return ( - self.mean_u * e1 - + self.mean_u - * np.sqrt(self.model.var / self._mode_no) - * summed_modes - + nugget - ) - - def _create_unit_vector(self, broadcast_shape, axis=0): - """Create a unit vector. - - Can be multiplied with a vector of shape broadcast_shape - - Parameters - ---------- - broadcast_shape : :class:`tuple` - the shape of the array with which - the unit vector is to be multiplied - axis : :class:`int`, optional - the direction of the unit vector. Default: ``0`` - - Returns - ------- - :class:`numpy.ndarray` - the unit vector - """ - shape = np.ones(len(broadcast_shape), dtype=int) - shape[0] = self.model.dim - - e1 = np.zeros(shape) - e1[axis] = 1.0 - return e1 diff --git a/src/gstools_cython/field/plot.py b/src/gstools_cython/field/plot.py deleted file mode 100644 index b17cfc71..00000000 --- a/src/gstools_cython/field/plot.py +++ /dev/null @@ -1,402 +0,0 @@ -""" -GStools subpackage providing plotting routines for spatial fields. - -.. currentmodule:: gstools.field.plot - -The following classes and functions are provided - -.. autosummary:: - plot_field - plot_vec_field -""" - -# pylint: disable=C0103, W0613, E1101, E0606 -import numpy as np -from scipy import interpolate as inter -from scipy.spatial import ConvexHull - -from gstools.tools.geometric import rotation_planes -from gstools.tools.misc import get_fig_ax - -try: - import matplotlib.pyplot as plt - from matplotlib.widgets import RadioButtons, Slider -except ImportError as exc: - raise ImportError("Plotting: Matplotlib not installed.") from exc - - -__all__ = ["plot_field", "plot_vec_field"] - - -# plotting routines ####################################################### - - -def plot_field( - fld, field="field", fig=None, ax=None, **kwargs -): # pragma: no cover - """ - Plot a spatial field. - - Parameters - ---------- - fld : :class:`Field` - The given Field class instance. - field : :class:`str`, optional - Field that should be plotted. Default: "field" - fig : :class:`Figure` or :any:`None`, optional - Figure to plot the axes on. If `None`, a new one will be created. - Default: `None` - ax : :class:`Axes` or :any:`None`, optional - Axes to plot on. If `None`, a new one will be added to the figure. - Default: `None` - **kwargs - Forwarded to the plotting routine. - """ - if fld.dim == 1: - return plot_1d(fld.pos, fld[field], fig, ax, fld.temporal, **kwargs) - return plot_nd( - fld.pos, - fld[field], - fld.mesh_type, - fig, - ax, - fld.latlon, - fld.temporal, - **kwargs, - ) - - -def plot_1d( - pos, field, fig=None, ax=None, temporal=False, ax_names=None -): # pragma: no cover - """ - Plot a 1D field. - - Parameters - ---------- - pos : :class:`list` - the position tuple, containing either the point coordinates (x, y, ...) - or the axes descriptions (for mesh_type='structured') - field : :class:`numpy.ndarray` - Field values. - temporal : :class:`bool`, optional - Indicate a metric spatio-temporal covariance model. - The time-dimension is assumed to be appended, - meaning the pos tuple is (x,y,z,...,t) or (lat, lon, t). - Default: False - fig : :class:`Figure` or :any:`None`, optional - Figure to plot the axes on. If `None`, a new one will be created. 
- Default: `None` - ax : :class:`Axes` or :any:`None`, optional - Axes to plot on. If `None`, a new one will be added to the figure. - Default: `None` - ax_names : :class:`list` of :class:`str`, optional - Axes names. The default is ["$x$", "field"]. - - Returns - ------- - ax : :class:`Axes` - Axis containing the plot. - """ - fig, ax = get_fig_ax(fig, ax) - title = f"Field 1D: {field.shape}" - x = pos[0] - x = x.flatten() - arg = np.argsort(x) - ax_names = _ax_names(1, temporal=temporal, ax_names=ax_names) - ax.plot(x[arg], field.ravel()[arg]) - ax.set_xlabel(ax_names[0]) - ax.set_ylabel(ax_names[1]) - ax.set_title(title) - fig.show() - return ax - - -def plot_nd( - pos, - field, - mesh_type, - fig=None, - ax=None, - latlon=False, - temporal=False, - resolution=128, - ax_names=None, - aspect="quad", - show_colorbar=True, - convex_hull=False, - contour_plot=True, - **kwargs, -): # pragma: no cover - """ - Plot field in arbitrary dimensions. - - Parameters - ---------- - pos : :class:`list` - the position tuple, containing either the point coordinates (x, y, ...) - or the axes descriptions (for mesh_type='structured') - field : :class:`numpy.ndarray` - Field values. - fig : :class:`Figure` or :any:`None`, optional - Figure to plot the axes on. If `None`, a new one will be created. - Default: `None` - ax : :class:`Axes` or :any:`None`, optional - Axes to plot on. If `None`, a new one will be added to the figure. - Default: `None` - latlon : :class:`bool`, optional - Whether the data is representing 2D fields on earths surface described - by latitude and longitude. When using this, the estimator will - use great-circle distance for variogram estimation. - Note, that only an isotropic variogram can be estimated and a - ValueError will be raised, if a direction was specified. - Bin edges need to be given in radians in this case. - Default: False - temporal : :class:`bool`, optional - Indicate a metric spatio-temporal covariance model. - The time-dimension is assumed to be appended, - meaning the pos tuple is (x,y,z,...,t) or (lat, lon, t). - Default: False - resolution : :class:`int`, optional - Resolution of the imshow plot. The default is 128. - ax_names : :class:`list` of :class:`str`, optional - Axes names. The default is ["$x$", "field"]. - aspect : :class:`str` or :any:`None` or :class:`float`, optional - Aspect of the plot. Can be "auto", "equal", "quad", None or a number - describing the aspect ratio. - The default is "quad". - show_colorbar : :class:`bool`, optional - Whether to show the colorbar. The default is True. - convex_hull : :class:`bool`, optional - Whether to show the convex hull in 2D with unstructured data. - The default is False. - contour_plot : :class:`bool`, optional - Whether to use a contour-plot in 2D. The default is True. - - Returns - ------- - ax : :class:`Axes` - Axis containing the plot. 
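A short usage sketch for the plotting helpers, assuming the public gstools API and an installed matplotlib; srf.plot dispatches to plot_field for scalar fields:

    import gstools as gs

    model = gs.Gaussian(dim=2, var=1.0, len_scale=10.0)
    srf = gs.SRF(model, seed=20170519)
    srf.structured([range(100), range(100)])
    srf.plot()  # contour plot of the stored structured 2D field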
- """ - dim = len(pos) - assert dim > 1 - assert not latlon or dim == 2 + int(bool(temporal)) - if dim == 2 and contour_plot: - return _plot_2d( - pos, - field, - mesh_type, - fig, - ax, - latlon, - temporal, - ax_names, - **kwargs, - ) - if latlon: - # swap lat-lon to lon-lat (x-y) - if temporal: - pos = (pos[1], pos[0], pos[2]) - else: - pos = (pos[1], pos[0]) - if mesh_type != "unstructured": - field = np.moveaxis(field, [0, 1], [1, 0]) - ax_names = _ax_names(dim, latlon, temporal, ax_names) - # init planes - planes = rotation_planes(dim) - plane_names = [f" {ax_names[p[0]]} - {ax_names[p[1]]}" for p in planes] - ax_ends = [[p.min(), p.max()] for p in pos] - ax_rngs = [end[1] - end[0] for end in ax_ends] - ax_steps = [rng / resolution for rng in ax_rngs] - ax_extents = [ax_ends[p[0]] + ax_ends[p[1]] for p in planes] - # create figure - reformat = fig is None and ax is None - fig, ax = get_fig_ax(fig, ax) - ax.set_title(f"Field {dim}D {mesh_type} {field.shape}") - if reformat: # only format fig if it was created here - fig.set_size_inches(8, 5.5 + 0.5 * (dim - 2)) - # init additional axis, radio-buttons and sliders - s_frac = 0.5 * (dim - 2) / (6 + 0.5 * (dim - 2)) - s_size = s_frac / max(dim - 2, 1) - left, bottom = (0.25, s_frac + 0.13) if dim > 2 else (None, None) - fig.subplots_adjust(left=left, bottom=bottom) - slider = [] - for i in range(dim - 2, 0, -1): - slider_ax = fig.add_axes([0.3, i * s_size, 0.435, s_size * 0.6]) - slider.append(Slider(slider_ax, "", 0, 1, facecolor="grey")) - slider[-1].vline.set_color("k") - # create radio buttons - if dim > 2: - rax = fig.add_axes( - [0.05, 0.85 - 2 * s_frac, 0.15, 2 * s_frac], frame_on=0, alpha=0 - ) - rax.set_title(" Plane", loc="left") - radio = RadioButtons(rax, plane_names, activecolor="grey") - elif mesh_type == "unstructured" and convex_hull: - # show convex hull in 2D - hull = ConvexHull(pos.T) - for simplex in hull.simplices: - ax.plot(pos[0, simplex], pos[1, simplex], "k") - # init imshow and colorbar axis - grid = np.mgrid[0 : 1 : resolution * 1j, 0 : 1 : resolution * 1j] - f_ini, vmin, vmax = np.full_like(grid[0], np.nan), field.min(), field.max() - im = ax.imshow( - f_ini.T, interpolation="bicubic", origin="lower", vmin=vmin, vmax=vmax - ) - - # actions - def inter_plane(cuts, axes): - """Interpolate plane.""" - plane_ax = [] - for i, (rng, end, cut) in enumerate(zip(ax_rngs, ax_ends, cuts)): - if i in axes: - plane_ax.append(grid[axes.index(i)] * rng + end[0]) - else: - plane_ax.append(np.full_like(grid[0], cut, dtype=float)) - # needs to be a tuple - plane_ax = tuple(plane_ax) - if mesh_type != "unstructured": - return inter.interpn(pos, field, plane_ax, bounds_error=False) - return inter.griddata(pos.T, field, plane_ax, method="nearest") - - def update_field(*args): - """Sliders update.""" - p = plane_names.index(radio.value_selected) if dim > 2 else 0 - # dummy cut values for selected plane-axes (setting to 0) - cuts = [s.val for s in slider] - cuts.insert(planes[p][0], 0) - cuts.insert(planes[p][1], 0) - im.set_array(inter_plane(cuts, planes[p]).T) - fig.canvas.draw_idle() - - def update_plane(label): - """Radio button update.""" - p = plane_names.index(label) - cut_select = [i for i in range(dim) if i not in planes[p]] - # reset sliders - for i, s in zip(cut_select, slider): - s.label.set_text(ax_names[i]) - s.valmin, s.valmax = ax_ends[i] - s.valinit = ax_ends[i][0] + ax_rngs[i] / 2.0 - s.valstep = ax_steps[i] - s.ax.set_xlim(*ax_ends[i]) - # update representation - s.vline.set_data(2 * [s.valinit], [-0.1, 1.1]) - 
s.reset() - im.set_extent(ax_extents[p]) - asp = 1.0 # init value - if aspect == "quad": - asp = ax_rngs[planes[p][0]] / ax_rngs[planes[p][1]] - if aspect is not None: - ax.set_aspect(asp if aspect == "quad" else aspect) - ax.set_xlabel(ax_names[planes[p][0]]) - ax.set_ylabel(ax_names[planes[p][1]]) - update_field() - - # initial plot on xy plane - update_plane(plane_names[0]) - # bind actions - if dim > 2: - radio.on_clicked(update_plane) - for s in slider: - s.on_changed(update_field) - if show_colorbar: - fig.colorbar(im, ax=ax) - fig.show() - return ax - - -def plot_vec_field(fld, field="field", fig=None, ax=None): # pragma: no cover - """ - Plot a spatial random vector field. - - Parameters - ---------- - fld : :class:`Field` - The given field class instance. - field : :class:`str`, optional - Field that should be plotted. Default: "field" - fig : :class:`Figure` or :any:`None`, optional - Figure to plot the axes on. If `None`, a new one will be created. - Default: `None` - ax : :class:`Axes` or :any:`None`, optional - Axes to plot on. If `None`, a new one will be added to the figure. - Default: `None` - """ - if fld.mesh_type == "unstructured": - raise RuntimeError( - "Only structured vector fields are supported " - "for plotting. Please create one on a structured grid." - ) - plt_fld = fld[field] - norm = np.sqrt(plt_fld[0, :].T ** 2 + plt_fld[1, :].T ** 2) - - fig, ax = get_fig_ax(fig, ax) - title = f"Field 2D {fld.mesh_type}: {plt_fld.shape}" - x = fld.pos[0] - y = fld.pos[1] - - sp = plt.streamplot( - x, - y, - plt_fld[0, :].T, - plt_fld[1, :].T, - color=norm, - linewidth=norm / 2, - ) - ax.set_xlabel("X") - ax.set_ylabel("Y") - ax.set_title(title) - fig.colorbar(sp.lines) - fig.show() - return ax - - -def _ax_names(dim, latlon=False, temporal=False, ax_names=None): - t_fac = int(bool(temporal)) - if ax_names is not None: - assert len(ax_names) >= dim - return ax_names[:dim] - if dim == 2 + t_fac and latlon: - return ["lon", "lat"] + t_fac * ["time"] - if dim - t_fac <= 3: - return ( - ["$x$", "$y$", "$z$"][: dim - t_fac] - + t_fac * ["time"] - + (dim == 1) * ["field"] - ) - return [f"$x_{{{i}}}$" for i in range(dim - t_fac)] + t_fac * ["time"] - - -def _plot_2d( - pos, - field, - mesh_type, - fig=None, - ax=None, - latlon=False, - temporal=False, - ax_names=None, - levels=64, - antialias=True, -): # pragma: no cover - """Plot a 2d field with a contour plot.""" - fig, ax = get_fig_ax(fig, ax) - title = f"Field 2D {mesh_type}: {field.shape}" - ax_names = _ax_names(2, latlon, temporal, ax_names=ax_names) - x, y = pos[::-1] if latlon else pos - if mesh_type == "unstructured": - cont = ax.tricontourf(x, y, field.ravel(), levels=levels) - if antialias: - ax.tricontour(x, y, field.ravel(), levels=levels, zorder=-10) - else: - plt_fld = field if latlon else field.T - cont = ax.contourf(x, y, plt_fld, levels=levels) - if antialias: - ax.contour(x, y, plt_fld, levels=levels, zorder=-10) - ax.set_xlabel(ax_names[0]) - ax.set_ylabel(ax_names[1]) - ax.set_title(title) - fig.colorbar(cont) - fig.show() - return ax diff --git a/src/gstools_cython/field/srf.py b/src/gstools_cython/field/srf.py deleted file mode 100644 index d88e46c0..00000000 --- a/src/gstools_cython/field/srf.py +++ /dev/null @@ -1,218 +0,0 @@ -""" -GStools subpackage providing a class for standard spatial random fields. - -.. currentmodule:: gstools.field.srf - -The following classes are provided - -.. 
autosummary:: - SRF -""" - -# pylint: disable=C0103, W0221, E1102 - -import numpy as np - -from gstools.field.base import Field -from gstools.field.generator import Generator, IncomprRandMeth, RandMeth -from gstools.field.upscaling import var_coarse_graining, var_no_scaling - -__all__ = ["SRF"] - -GENERATOR = { - "RandMeth": RandMeth, - "IncomprRandMeth": IncomprRandMeth, - "VectorField": IncomprRandMeth, - "VelocityField": IncomprRandMeth, -} -"""dict: Standard generators for spatial random fields.""" - -UPSCALING = { - "coarse_graining": var_coarse_graining, - "no_scaling": var_no_scaling, -} -"""dict: Upscaling routines for spatial random fields.""" - - -class SRF(Field): - """A class to generate spatial random fields (SRF). - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model of the spatial random field. - mean : :class:`float` or :any:`callable`, optional - Mean of the SRF (in normal form). Could also be a callable. - The default is 0.0. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the SRF to transform the field values. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - Trend of the SRF (in transformed form). - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - upscaling : :class:`str`, optional - Method to be used for upscaling the variance at each point - depending on the related element volume. - See the ``point_volumes`` keyword in the :any:`SRF.__call__` routine. - At the moment, the following upscaling methods are provided: - - * "no_scaling" : No upscaling is applied to the variance. - See: :any:`var_no_scaling` - * "coarse_graining" : A volume depended variance is - calculated by the upscaling technique coarse graining. - See: :any:`var_coarse_graining` - - Default: "no_scaling" - generator : :class:`str` or :any:`Generator`, optional - Name or class of the field generator to be used. - At the moment, the following generators are provided: - - * "RandMeth" : The Randomization Method. - See: :any:`RandMeth` - * "IncomprRandMeth" : The incompressible Randomization Method. - This is the original algorithm proposed by Kraichnan 1970 - See: :any:`IncomprRandMeth` - * "VectorField" : an alias for "IncomprRandMeth" - * "VelocityField" : an alias for "IncomprRandMeth" - - Default: "RandMeth" - **generator_kwargs - Keyword arguments that are forwarded to the generator in use. - Have a look at the provided generators for further information. - """ - - def __init__( - self, - model, - mean=0.0, - normalizer=None, - trend=None, - upscaling="no_scaling", - generator="RandMeth", - **generator_kwargs, - ): - super().__init__(model, mean=mean, normalizer=normalizer, trend=trend) - # initialize private attributes - self._generator = None - self._upscaling = None - self._upscaling_func = None - # initialize attributes - self.upscaling = upscaling - self.set_generator(generator, **generator_kwargs) - - def __call__( - self, - pos=None, - seed=np.nan, - point_volumes=0.0, - mesh_type="unstructured", - post_process=True, - store=True, - ): - """Generate the spatial random field. - - The field is saved as `self.field` and is also returned. - - Parameters - ---------- - pos : :class:`list`, optional - the position tuple, containing main direction and transversal - directions - seed : :class:`int`, optional - seed for RNG for resetting. 
Default: keep seed from generator - point_volumes : :class:`float` or :class:`numpy.ndarray` - If your evaluation points for the field are coming from a mesh, - they are probably representing a certain element volume. - This volume can be passed by `point_volumes` to apply the - given variance upscaling. If `point_volumes` is ``0`` nothing - is changed. Default: ``0`` - mesh_type : :class:`str` - 'structured' / 'unstructured' - post_process : :class:`bool`, optional - Whether to apply mean, normalizer and trend to the field. - Default: `True` - store : :class:`str` or :class:`bool`, optional - Whether to store field (True/False) with default name - or with specified name. - The default is :any:`True` for default name "field". - - Returns - ------- - field : :class:`numpy.ndarray` - the SRF - """ - name, save = self.get_store_config(store) - # update the model/seed in the generator if any changes were made - self.generator.update(self.model, seed) - # get isometrized positions and the resulting field-shape - iso_pos, shape = self.pre_pos(pos, mesh_type) - # generate the field - field = np.reshape(self.generator(iso_pos), shape) - # upscaled variance - if not np.isscalar(point_volumes) or not np.isclose(point_volumes, 0): - scaled_var = self.upscaling_func(self.model, point_volumes) - if np.size(scaled_var) > 1: - scaled_var = np.reshape(scaled_var, shape) - field *= np.sqrt(scaled_var / self.model.sill) - return self.post_field(field, name, post_process, save) - - def upscaling_func(self, *args, **kwargs): - """Upscaling method applied to the field variance.""" - return self._upscaling_func(*args, **kwargs) - - def set_generator(self, generator, **generator_kwargs): - """Set the generator for the field. - - Parameters - ---------- - generator : :class:`str` or :any:`Generator`, optional - Name or class of the field generator to be used. - Default: "RandMeth" - **generator_kwargs - keyword arguments that are forwarded to the generator in use. - """ - gen = GENERATOR[generator] if generator in GENERATOR else generator - if not (isinstance(gen, type) and issubclass(gen, Generator)): - raise ValueError( - f"gstools.SRF: Unknown or wrong generator: {generator}" - ) - self._generator = gen(self.model, **generator_kwargs) - self.value_type = self.generator.value_type - - for val in [self.mean, self.trend]: - if not callable(val) and val is not None: - if np.size(val) > 1 and self.value_type == "scalar": - raise ValueError(f"Mean/Trend: Wrong size ({val})") - - @property - def generator(self): - """:any:`callable`: The generator of the field. - - Default: :any:`RandMeth` - """ - return self._generator - - @property - def upscaling(self): # pragma: no cover - """:class:`str`: Name of the upscaling method. - - See the ``point_volumes`` keyword in the :any:`SRF.__call__` routine. - Default: "no_scaling" - """ - return self._upscaling - - @upscaling.setter - def upscaling(self, upscaling): - if upscaling in UPSCALING: - self._upscaling = upscaling - self._upscaling_func = UPSCALING[upscaling] - else: - raise ValueError(f"SRF: Unknown upscaling method: {upscaling}") - - def __repr__(self): - """Return String representation.""" - return ( - f"{self.name}(model={self.model.name}" - f"{self._fmt_mean_norm_trend()}, generator={self.generator.name})" - ) diff --git a/src/gstools_cython/field/tools.py b/src/gstools_cython/field/tools.py deleted file mode 100644 index dfa2e3c6..00000000 --- a/src/gstools_cython/field/tools.py +++ /dev/null @@ -1,257 +0,0 @@ -""" -GStools subpackage providing tools for Fields. 
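As a usage sketch for the SRF class removed above (generator selection and the scalar/vector distinction); the `gs.Gaussian` model class and the `mean_velocity` keyword of the incompressible generator are assumptions from the public GSTools API and are not shown in this diff::

    import numpy as np
    import gstools as gs

    x = y = np.arange(0.0, 60.0, 1.0)
    model = gs.Gaussian(dim=2, var=1.0, len_scale=10.0)

    # scalar field via the default Randomization Method generator
    srf = gs.SRF(model, mean=0.0, generator="RandMeth")
    field = srf([x, y], mesh_type="structured", seed=19841203)

    # incompressible random vector field (Kraichnan generator)
    vec_srf = gs.SRF(model, generator="VectorField", mean_velocity=1.0)
    vec_field = vec_srf([x, y], mesh_type="structured", seed=19841203)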
- -.. currentmodule:: gstools.field.tools - -The following classes and functions are provided - -.. autosummary:: - fmt_mean_norm_trend - to_vtk_helper - generate_on_mesh -""" - -# pylint: disable=W0212, C0415 -import meshio -import numpy as np - -from gstools.normalizer import Normalizer -from gstools.tools.export import to_vtk, vtk_export -from gstools.tools.misc import list_format - -__all__ = ["fmt_mean_norm_trend", "to_vtk_helper", "generate_on_mesh"] - - -def _fmt_func_val(f_cls, func_val): # pragma: no cover - if func_val is None: - return str(None) - if callable(func_val): - return "" # or format(func_val.__name__) - if np.size(func_val) > 1: - return list_format(func_val, prec=f_cls.model._prec) - return f"{float(func_val):.{f_cls.model._prec}}" - - -def _fmt_normalizer(f_cls): # pragma: no cover - norm = f_cls.normalizer - return str(None) if norm.__class__ is Normalizer else norm.name - - -def fmt_mean_norm_trend(f_cls): # pragma: no cover - """Format string repr. for mean, normalizer and trend of a field.""" - args = [ - "mean=" + _fmt_func_val(f_cls, f_cls.mean), - "normalizer=" + _fmt_normalizer(f_cls), - "trend=" + _fmt_func_val(f_cls, f_cls.trend), - ] - return "".join([", " + arg for arg in args if not arg.endswith("None")]) - - -def to_vtk_helper( - f_cls, filename=None, field_select="field", fieldname="field" -): # pragma: no cover - """Create a VTK/PyVista grid of the field or save it as a VTK file. - - This is an internal helper that will handle saving or creating objects - - Parameters - ---------- - f_cls : :any:`Field` - Field class in use. - filename : :class:`str` - Filename of the file to be saved, including the path. Note that an - ending (.vtr or .vtu) will be added to the name. If ``None`` is - passed, a PyVista dataset of the appropriate type will be returned. - field_select : :class:`str`, optional - Field that should be stored. Can be: - "field", "raw_field", "krige_field", "err_field" or "krige_var". - Default: "field" - fieldname : :class:`str`, optional - Name of the field in the VTK file. Default: "field" - """ - field = f_cls[field_select] if field_select in f_cls.field_names else None - if f_cls.value_type == "vector": - if not (f_cls.pos is None or field is None or f_cls.mesh_type is None): - suf = ["_X", "_Y", "_Z"] - fields = {} - for i in range(f_cls.model.dim): - fields[fieldname + suf[i]] = field[i] - if filename is None: - return to_vtk(f_cls.pos, fields, f_cls.mesh_type) - return vtk_export(filename, f_cls.pos, fields, f_cls.mesh_type) - raise ValueError(f"Field.to_vtk: '{field_select}' not available.") - if f_cls.value_type == "scalar": - if not (f_cls.pos is None or field is None or f_cls.mesh_type is None): - if filename is None: - return to_vtk(f_cls.pos, {fieldname: field}, f_cls.mesh_type) - return vtk_export( - filename, f_cls.pos, {fieldname: field}, f_cls.mesh_type - ) - raise ValueError(f"Field.to_vtk: '{field_select}' not available.") - raise ValueError(f"Unknown field value type: {f_cls.value_type}") - - -def generate_on_mesh( - f_cls, mesh, points="centroids", direction="all", name="field", **kwargs -): - """Generate a field on a given meshio, ogs5py or pyvista mesh. - - Parameters - ---------- - f_cls : :any:`Field` - The field class in use. - mesh : meshio.Mesh or ogs5py.MSH or PyVista mesh - The given meshio, ogs5py, or PyVista mesh - points : :class:`str`, optional - The points to evaluate the field at. - Either the "centroids" of the mesh cells - (calculated as mean of the cell vertices) or the "points" - of the given mesh. 
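The remaining parameters continue below; as an aside, a minimal sketch of feeding this routine a meshio mesh (the `gs.Gaussian`/`gs.SRF` names come from the public GSTools API and the tiny triangle mesh is made up for illustration)::

    import meshio
    import numpy as np
    import gstools as gs
    from gstools.field.tools import generate_on_mesh

    # two triangles in the x-y plane as a minimal meshio mesh
    points = np.array(
        [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]]
    )
    mesh = meshio.Mesh(points, [("triangle", np.array([[0, 1, 2], [0, 2, 3]]))])

    model = gs.Gaussian(dim=2, var=1.0, len_scale=0.5)
    srf = gs.SRF(model)

    # evaluate at the cell centroids of the x-y plane; stored as cell_data "field"
    field = generate_on_mesh(srf, mesh, points="centroids", direction="xy", seed=20170519)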
- Default: "centroids" - direction : :class:`str` or :class:`list`, optional - Here you can state which direction should be chosen for - lower dimension. For example, if you got a 2D mesh in xz direction, - you have to pass "xz". By default, all directions are used. - One can also pass a list of indices. - Default: "all" - name : :class:`str` or :class:`list` of :class:`str`, optional - Name(s) to store the field(s) in the given mesh as point_data or - cell_data. If to few names are given, digits will be appended. - Default: "field" - **kwargs - Keyword arguments forwarded to `Field.__call__`. - - Notes - ----- - This will store the field in the given mesh under the given name, - if a meshio or PyVista mesh was given. - - See: https://github.com/nschloe/meshio - - See: https://github.com/GeoStat-Framework/ogs5py - - See: https://github.com/pyvista/pyvista - """ - has_pyvista = False - has_ogs5py = False - - try: - import pyvista as pv - - has_pyvista = True - except ImportError: - pass - try: - import ogs5py as ogs - - has_ogs5py = True - except ImportError: - pass - - if isinstance(direction, str) and direction == "all": - select = list(range(f_cls.dim)) - elif isinstance(direction, str): - select = _get_select(direction)[: f_cls.dim] - else: - select = direction[: f_cls.dim] - if len(select) < f_cls.dim: - raise ValueError( - f"Field.mesh: need at least {f_cls.dim} direction(s), " - f"got '{direction}'" - ) - # convert pyvista mesh - if has_pyvista and pv.is_pyvista_dataset(mesh): - if points == "centroids": - pnts = mesh.cell_centers().points.T[select] - else: - pnts = mesh.points.T[select] - out = f_cls.unstructured(pos=pnts, **kwargs) - # Deal with the output - fields = [out] if isinstance(out, np.ndarray) else out - if f_cls.value_type == "vector": - fields = [f.T for f in fields] - for f_name, field in zip(_names(name, len(fields)), fields): - mesh[f_name] = field - # convert ogs5py mesh - elif has_ogs5py and isinstance(mesh, ogs.MSH): - if points == "centroids": - pnts = mesh.centroids_flat.T[select] - else: - pnts = mesh.NODES.T[select] - out = f_cls.unstructured(pos=pnts, **kwargs) - # convert meshio mesh - elif isinstance(mesh, meshio.Mesh): - if points == "centroids": - # define unique order of cells - offset = [] - length = [] - mesh_dim = mesh.points.shape[1] - if mesh_dim < f_cls.dim: - raise ValueError("Field.mesh: mesh dimension too low!") - pnts = np.empty((0, mesh_dim), dtype=np.double) - for cell in mesh.cells: - pnt = np.mean(mesh.points[cell.data], axis=1) - offset.append(pnts.shape[0]) - length.append(pnt.shape[0]) - pnts = np.vstack((pnts, pnt)) - # generate pos for __call__ - pnts = pnts.T[select] - out = f_cls.unstructured(pos=pnts, **kwargs) - fields = [out] if isinstance(out, np.ndarray) else out - if f_cls.value_type == "vector": - fields = [f.T for f in fields] - f_lists = [] - for field in fields: - f_list = [] - for off, leng in zip(offset, length): - f_list.append(field[off : off + leng]) - f_lists.append(f_list) - for f_name, f_list in zip(_names(name, len(f_lists)), f_lists): - mesh.cell_data[f_name] = f_list - else: - out = f_cls.unstructured(pos=mesh.points.T[select], **kwargs) - fields = [out] if isinstance(out, np.ndarray) else out - if f_cls.value_type == "vector": - fields = [f.T for f in fields] - for f_name, field in zip(_names(name, len(fields)), fields): - mesh.point_data[f_name] = field - else: - raise ValueError("Field.mesh: Unknown mesh format!") - return out - - -def _names(name, cnt): - name = [name] if isinstance(name, str) else 
list(name)[:cnt] - if len(name) < cnt: - name += [f"{name[-1]}{i + 1}" for i in range(cnt - len(name))] - return name - - -def _get_select(direction): - select = [] - if not 0 < len(direction) < 4: - raise ValueError( - f"Field.mesh: need 1 to 3 direction(s), got '{direction}'" - ) - for axis in direction: - if axis == "x": - if 0 in select: - raise ValueError( - f"Field.mesh: got duplicate directions {direction}" - ) - select.append(0) - elif axis == "y": - if 1 in select: - raise ValueError( - f"Field.mesh: got duplicate directions {direction}" - ) - select.append(1) - elif axis == "z": - if 2 in select: - raise ValueError( - f"Field.mesh: got duplicate directions {direction}" - ) - select.append(2) - else: - raise ValueError(f"Field.mesh: got unknown direction {axis}") - return select diff --git a/src/gstools_cython/field/upscaling.py b/src/gstools_cython/field/upscaling.py deleted file mode 100644 index 857bfc45..00000000 --- a/src/gstools_cython/field/upscaling.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -GStools subpackage providing upscaling routines for the spatial random field. - -.. currentmodule:: gstools.field.upscaling - -The following functions are provided - -.. autosummary:: - :toctree: - - var_coarse_graining - var_no_scaling -""" - -# pylint: disable=W0613 -import warnings - -import numpy as np - -__all__ = ["var_coarse_graining", "var_no_scaling"] - - -# scaling routines ############################################################ - - -def var_coarse_graining(model, point_volumes=0.0): - r"""Coarse Graning procedure to upscale the variance for uniform flow. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for the field. - point_volumes : :class:`float` or :class:`numpy.ndarray` - Volumes of the elements at the given points. Default: ``0`` - - Returns - ------- - scaled_var : :class:`float` or :class:`numpy.ndarray` - The upscaled variance - - Notes - ----- - This procedure was presented in [Attinger03]_. It applies the - upscaling procedure 'Coarse Graining' to the Groundwater flow equation - under uniform flow on a lognormal distributed conductivity field following - a gaussian covariance function. A filter over a cube with a given - edge-length :math:`\lambda` is applied and an upscaled conductivity field - is obtained. - The upscaled field is again following a gaussian covariance function with - scale dependent variance and length-scale: - - .. math:: - \lambda &= V^{\frac{1}{d}} \\ - \sigma^2\left(\lambda\right) &= - \sigma^2\cdot\left( - \frac{\ell^2}{\ell^2+\left(\frac{\lambda}{2}\right)^2} - \right)^{\frac{d}{2}} \\ - \ell\left(\lambda\right) &= - \left(\ell^2+\left(\frac{\lambda}{2}\right)^2\right)^{\frac{1}{2}} - - Therby :math:`\lambda` will be calculated from the given - ``point_volumes`` :math:`V` by assuming a cube with the given volume. - - The upscaled length scale will be ignored by this routine. - - References - ---------- - .. [Attinger03] Attinger, S. 2003, - ''Generalized coarse graining procedures for flow in porous media'', - Computational Geosciences, 7(4), 253–273. - """ - if not np.isclose(model.nugget, 0): - warnings.warn( - "var_coarse_graining: non-zero nugget will violate upscaling!" - ) - # interpret volume as a hypercube and calculate the edge length - edge = point_volumes ** (1.0 / model.dim) - var_factor = ( - model.len_scale**2 / (model.len_scale**2 + edge**2 / 4) - ) ** (model.dim / 2.0) - - return model.sill * var_factor - - -def var_no_scaling(model, *args, **kwargs): - r"""Dummy function to bypass scaling. 
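To make the coarse-graining formula above concrete, here is a small numeric sketch; the `gs.Gaussian` model class is an assumption from the public GSTools API, while `var_coarse_graining` and the `upscaling` keyword are the routines documented above::

    import numpy as np
    import gstools as gs
    from gstools.field.upscaling import var_coarse_graining

    model = gs.Gaussian(dim=2, var=2.0, len_scale=10.0)  # no nugget

    vol = 25.0                              # cell volume V
    edge = vol ** (1.0 / model.dim)         # lambda = V**(1/d)
    expected = model.sill * (
        model.len_scale**2 / (model.len_scale**2 + edge**2 / 4)
    ) ** (model.dim / 2.0)

    # variance shrinks with growing cell volume
    print(np.isclose(var_coarse_graining(model, point_volumes=vol), expected))

    # the same upscaling is applied by SRF when point_volumes is passed
    srf = gs.SRF(model, upscaling="coarse_graining")
    field = srf(([0.0, 10.0, 20.0], [0.0, 5.0, 10.0]), point_volumes=vol, seed=4)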
- - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for the field. - - Returns - ------- - var : :class:`float` - The model variance. - """ - return model.sill diff --git a/src/gstools_cython/krige/__init__.py b/src/gstools_cython/krige/__init__.py deleted file mode 100644 index 66d03246..00000000 --- a/src/gstools_cython/krige/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -GStools subpackage providing kriging. - -.. currentmodule:: gstools.krige - -Kriging Classes -^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - Krige - Simple - Ordinary - Universal - ExtDrift - Detrended -""" - -from gstools.krige.base import Krige -from gstools.krige.methods import ( - Detrended, - ExtDrift, - Ordinary, - Simple, - Universal, -) - -__all__ = ["Krige", "Simple", "Ordinary", "Universal", "ExtDrift", "Detrended"] diff --git a/src/gstools_cython/krige/base.py b/src/gstools_cython/krige/base.py deleted file mode 100755 index 78aa2a9f..00000000 --- a/src/gstools_cython/krige/base.py +++ /dev/null @@ -1,729 +0,0 @@ -""" -GStools subpackage providing a base class for kriging. - -.. currentmodule:: gstools.krige.base - -The following classes are provided - -.. autosummary:: - Krige -""" - -# pylint: disable=C0103, W0221, E1102, R0201, C0412 -import collections - -import numpy as np -import scipy.linalg as spl -from scipy.spatial.distance import cdist - -from gstools import config -from gstools.field.base import Field -from gstools.krige.tools import get_drift_functions, set_condition -from gstools.tools.geometric import rotated_main_axes -from gstools.tools.misc import eval_func -from gstools.variogram import vario_estimate - -if config.USE_RUST: # pragma: no cover - # pylint: disable=E0401 - from gstools_core import calc_field_krige, calc_field_krige_and_variance -else: - from gstools.krige.krigesum import ( - calc_field_krige, - calc_field_krige_and_variance, - ) - -__all__ = ["Krige"] - - -P_INV = {"pinv": spl.pinv, "pinvh": spl.pinvh} -"""dict: Standard pseudo-inverse routines""" - - -class Krige(Field): - """ - A Swiss Army knife for kriging. - - A Kriging class enabling the basic kriging routines: - Simple-, Ordinary-, Universal-, External Drift- - and detrended/regression-Kriging as well as - Kriging the Mean [Wackernagel2003]_. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. - cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - drift_functions : :class:`list` of :any:`callable`, :class:`str` or :class:`int` - Either a list of callable functions, an integer representing - the polynomial order of the drift or one of the following strings: - - * "linear" : regional linear drift (equals order=1) - * "quadratic" : regional quadratic drift (equals order=2) - - ext_drift : :class:`numpy.ndarray` or :any:`None`, optional - the external drift values at the given cond. positions. - mean : :class:`float`, optional - mean value used to shift normalized conditioning data. - Could also be a callable. The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. 
Should have the signature: f(x, [y, z, ...]) - This is used for detrended kriging, where the trended is subtracted - from the conditions before kriging is applied. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - unbiased : :class:`bool`, optional - Whether the kriging weights should sum up to 1, so the estimator - is unbiased. If unbiased is `False` and no drifts are given, - this results in simple kriging. - Default: True - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. - Default: False - - Notes - ----- - If you have changed any properties in the class, you can update the kriging - setup by calling :any:`Krige.set_condition` without any arguments. - - References - ---------- - .. 
[Wackernagel2003] Wackernagel, H., - "Multivariate geostatistics", - Springer, Berlin, Heidelberg (2003) - """ - - valid_value_types = ["scalar"] - """:class:`list` of :class:`str`: valid field value types.""" - - default_field_names = ["field", "krige_var", "mean_field"] - """:class:`list`: Default field names.""" - - def __init__( - self, - model, - cond_pos, - cond_val, - drift_functions=None, - ext_drift=None, - mean=None, - normalizer=None, - trend=None, - unbiased=True, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_normalizer=False, - fit_variogram=False, - ): - super().__init__(model, mean=mean, normalizer=normalizer, trend=trend) - self._unbiased = bool(unbiased) - self._exact = bool(exact) - self._pseudo_inv = bool(pseudo_inv) - self._pseudo_inv_type = None - self.pseudo_inv_type = pseudo_inv_type - # initialize private attributes - self._cond_pos = None - self._cond_val = None - self._cond_err = None - self._krige_mat = None - self._krige_pos = None - self._cond_trend = None - self._cond_ext_drift = np.array([]) - self._drift_functions = None - self.set_drift_functions(drift_functions) - self.set_condition( - cond_pos, - cond_val, - ext_drift, - cond_err, - fit_normalizer, - fit_variogram, - ) - - def __call__( - self, - pos=None, - mesh_type="unstructured", - ext_drift=None, - chunk_size=None, - only_mean=False, - return_var=True, - post_process=True, - store=True, - ): - """ - Generate the kriging field. - - The field is saved as `self.field` and is also returned. - The error variance is saved as `self.krige_var` and is also returned. - - Parameters - ---------- - pos : :class:`list`, optional - the position tuple, containing main direction and transversal - directions (x, [y, z]) - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - ext_drift : :class:`numpy.ndarray` or :any:`None`, optional - the external drift values at the given positions (only for EDK) - chunk_size : :class:`int`, optional - Chunk size to cut down the size of the kriging system to prevent - memory errors. - Default: None - only_mean : :class:`bool`, optional - Whether to only calculate the mean of the kriging field. - Default: `False` - return_var : :class:`bool`, optional - Whether to return the variance along with the field. - Default: `True` - post_process : :class:`bool`, optional - Whether to apply mean, normalizer and trend to the field. - Default: `True` - store : :class:`str` or :class:`bool` or :class:`list`, optional - Whether to store kriging fields (True/False) with default name - or with specified names. - The default is :any:`True` for default names - ["field", "krige_var"] or "mean_field" if `only_mean=True`. - - Returns - ------- - field : :class:`numpy.ndarray` - the kriged field or mean_field - krige_var : :class:`numpy.ndarray`, optional - the kriging error variance - (if return_var is True and only_mean is False) - """ - return_var &= not only_mean # don't return variance when calc. mean - fld_cnt = 2 if return_var else 1 - default = self.default_field_names[2] if only_mean else None - name, save = self.get_store_config(store, default, fld_cnt) - - iso_pos, shape = self.pre_pos(pos, mesh_type) - pnt_cnt = len(iso_pos[0]) - - field = np.empty(pnt_cnt, dtype=np.double) - krige_var = np.empty(pnt_cnt, dtype=np.double) if return_var else None - # set constant mean if present and wanted - if only_mean and self.drift_no == 0: - field[...] 
= self.get_mean(post_process=False) - # execute the kriging routine - else: - # set chunk size - chunk_size = pnt_cnt if chunk_size is None else int(chunk_size) - chunk_no = int(np.ceil(pnt_cnt / chunk_size)) - ext_drift = self._pre_ext_drift(pnt_cnt, ext_drift) - # iterate chunks - for i in range(chunk_no): - # get chunk slice for actual chunk - chunk_slice = ( - i * chunk_size, - min(pnt_cnt, (i + 1) * chunk_size), - ) - c_slice = slice(*chunk_slice) - # get RHS of the kriging system - k_vec = self._get_krige_vecs( - iso_pos, chunk_slice, ext_drift, only_mean - ) - # generate the raw kriging field and error variance - self._summate(field, krige_var, c_slice, k_vec, return_var) - # reshape field if we got a structured mesh - field = np.reshape(field, shape) - # save field to class - field = self.post_field(field, name[0], post_process, save[0]) - if return_var: # care about the estimated error variance - krige_var = np.reshape( - np.maximum(self.model.sill - krige_var, 0), shape - ) - krige_var = self.post_field(krige_var, name[1], False, save[1]) - return field, krige_var - return field - - def _summate(self, field, krige_var, c_slice, k_vec, return_var): - if return_var: # estimate error variance - field[c_slice], krige_var[c_slice] = calc_field_krige_and_variance( - self._krige_mat, k_vec, self._krige_cond - ) - else: # solely calculate the interpolated field - field[c_slice] = calc_field_krige( - self._krige_mat, k_vec, self._krige_cond - ) - - def _inv(self, mat): - # return pseudo-inverted matrix if wanted (numerically more stable) - if self.pseudo_inv: - # if the given type is a callable, call it - if callable(self.pseudo_inv_type): - return self.pseudo_inv_type(mat) - # use the selected method to compute the pseudo-inverse matrix - return P_INV[self.pseudo_inv_type](mat) - # if no pseudo-inverse is wanted, calculate the real inverse - return spl.inv(mat) - - def _get_krige_mat(self): - """Calculate the inverse matrix of the kriging equation.""" - res = np.empty((self.krige_size, self.krige_size), dtype=np.double) - # fill the kriging matrix with the covariance - res[: self.cond_no, : self.cond_no] = self.model.covariance( - self._get_dists(self._krige_pos) - ) - # apply the measurement error (nugget by default) - res[np.diag_indices(self.cond_no)] += self.cond_err - # set unbias condition (weights have to sum up to 1) - if self.unbiased: - res[self.cond_no, : self.cond_no] = 1 - res[: self.cond_no, self.cond_no] = 1 - # set functional drift terms - for i, f in enumerate(self.drift_functions): - drift_tmp = f(*self.cond_pos) - res[-self.drift_no + i, : self.cond_no] = drift_tmp - res[: self.cond_no, -self.drift_no + i] = drift_tmp - # set external drift terms - if self.ext_drift_no > 0: - ext_size = self.krige_size - self.ext_drift_no - res[ext_size:, : self.cond_no] = self.cond_ext_drift - res[: self.cond_no, ext_size:] = self.cond_ext_drift.T - # set lower right part of the matrix to 0 - res[self.cond_no :, self.cond_no :] = 0 - return self._inv(res) - - def _get_krige_vecs( - self, pos, chunk_slice=(0, None), ext_drift=None, only_mean=False - ): - """Calculate the RHS of the kriging equation.""" - # determine the chunk size - chunk_size = len(pos[0]) if chunk_slice[1] is None else chunk_slice[1] - chunk_size -= chunk_slice[0] - chunk_pos = None # init value - res = np.empty((self.krige_size, chunk_size), dtype=np.double) - if only_mean: - # set points to limit of the covariance to only get the mean - res[: self.cond_no, :] = 0 - else: - # get correct covariance functions 
(depending on exact values) - cf = self.model.cov_nugget if self.exact else self.model.covariance - res[: self.cond_no, :] = cf( - self._get_dists(self._krige_pos, pos, chunk_slice) - ) - # apply the unbiased condition - if self.unbiased: - res[self.cond_no, :] = 1 - # drift function need the anisotropic and rotated positions - if self.int_drift_no > 0: - chunk_pos = self.model.anisometrize(pos)[:, slice(*chunk_slice)] - # apply functional drift - for i, f in enumerate(self.drift_functions): - res[-self.drift_no + i, :] = f(*chunk_pos) - # apply external drift - if self.ext_drift_no > 0: - ext_size = self.krige_size - self.ext_drift_no - res[ext_size:, :] = ext_drift[:, slice(*chunk_slice)] - return res - - def _pre_ext_drift(self, pnt_cnt, ext_drift=None, set_cond=False): - """ - Preprocessor for external drifts. - - Parameters - ---------- - pnt_cnt : :class:`numpy.ndarray` - Number of points of the mesh. - ext_drift : :class:`numpy.ndarray` or :any:`None`, optional - the external drift values at the given positions (only for EDK) - For multiple external drifts, the first dimension - should be the index of the drift term. - set_cond : :class:`bool`, optional - State if the given external drift is set for the conditioning - points. Default: False - - Returns - ------- - ext_drift : :class:`numpy.ndarray` or :any:`None` - the drift values at the given positions - """ - if ext_drift is not None: - ext_drift = np.atleast_2d(np.asarray(ext_drift, dtype=np.double)) - if ext_drift.size == 0: # treat empty array as no ext_drift - return np.array([]) - if set_cond: - if len(ext_drift.shape) > 2 or ext_drift.shape[1] != pnt_cnt: - raise ValueError("Krige: wrong number of ext. drifts.") - return ext_drift - ext_shape = np.shape(ext_drift) - shape = (self.ext_drift_no, pnt_cnt) - if self.drift_no > 1 and ext_shape[0] != self.ext_drift_no: - raise ValueError("Krige: wrong number of external drifts.") - if np.prod(ext_shape) != np.prod(shape): - raise ValueError("Krige: wrong number of ext. drift values.") - return np.asarray(ext_drift, dtype=np.double).reshape(shape) - if not set_cond and self._cond_ext_drift.size > 0: - raise ValueError("Krige: wrong number of ext. drift values.") - return np.array([]) - - def _get_dists(self, pos1, pos2=None, pos2_slice=(0, None)): - """ - Calculate pairwise distances. - - Parameters - ---------- - pos1 : :class:`tuple` of :class:`numpy.ndarray` - the first position tuple - pos2 : :class:`tuple` of :class:`numpy.ndarray`, optional - the second position tuple. If none, the first one is taken. - pos2_slice : :class:`tuple` of :class:`int`, optional - Start and stop of slice for the pos2 array. Default: all values. - - Returns - ------- - :class:`numpy.ndarray` - Matrix containing the pairwise distances. - """ - if pos2 is None: - return cdist(pos1.T, pos1.T) - return cdist(pos1.T, pos2.T[slice(*pos2_slice), ...]) - - def get_mean(self, post_process=True): - """Calculate the estimated mean of the detrended field. - - Parameters - ---------- - post_process : :class:`bool`, optional - Whether to apply field-mean and normalizer. - Default: `True` - - Returns - ------- - mean : :class:`float` or :any:`None` - Mean of the Kriging System. - - Notes - ----- - Only not ``None`` if the Kriging System has a constant mean. - This means, no drift is given and the given field-mean is constant. - The result is neglecting a potential given trend. 
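For context, a minimal ordinary-kriging sketch exercising `__call__` and `get_mean`; the `gs.Spherical` model and the `gs.krige` namespace are assumptions from the public GSTools API, not part of this diff::

    import numpy as np
    import gstools as gs

    # 1D conditioning data
    cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7]
    cond_val = [0.47, 0.56, 0.74, 1.47, 1.74]

    model = gs.Spherical(dim=1, var=0.5, len_scale=2.0)
    krig = gs.krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val)

    # kriging field and error variance on a fine grid
    gridx = np.linspace(0.0, 15.0, 151)
    field, krige_var = krig(gridx)

    # constant mean estimated through the unbiasedness constraint
    print(krig.get_mean())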
- - """ - # if there are drift-terms, no constant mean can be calculated -> None - # if mean should not be post-processed, it exists when no drift given - if not self.has_const_mean and (post_process or self.drift_no > 0): - return None - res = 0.0 # for simple kriging return the given mean - # correctly setting given mean - mean = 0.0 if self.mean is None else self.mean - # for ordinary kriging return the estimated mean - if self.unbiased: - # set the right side of the kriging system to the limit of cov. - mean_est = np.concatenate((np.full_like(self.cond_val, 0.0), [1])) - # execute the kriging routine with einsum - res = np.einsum( - "i,ij,j", self._krige_cond, self._krige_mat, mean_est - ) - return self.normalizer.denormalize(res + mean) if post_process else res - - def set_condition( - self, - cond_pos=None, - cond_val=None, - ext_drift=None, - cond_err=None, - fit_normalizer=False, - fit_variogram=False, - ): - """Set the conditions for kriging. - - This method could also be used to update the kriging setup, when - properties were changed. Then you can call it without arguments. - - Parameters - ---------- - cond_pos : :class:`list`, optional - the position tuple of the conditions (x, [y, z]). Default: current. - cond_val : :class:`numpy.ndarray`, optional - the values of the conditions (nan values will be ignored). - Default: current. - ext_drift : :class:`numpy.ndarray` or :any:`None`, optional - the external drift values at the given conditions (only for EDK) - For multiple external drifts, the first dimension - should be the index of the drift term. When passing `None`, the - existing external drift will be used. - cond_err : :class:`str`, :class:`float`, :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied - to all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine.
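A sketch of the variogram-fitting shortcut described above (the default value follows below); the synthetic 1D data and the `gs.Exponential` model class are assumptions for illustration::

    import numpy as np
    import gstools as gs

    rng = np.random.RandomState(0)
    cond_pos = rng.uniform(0.0, 10.0, 50)
    cond_val = np.sin(cond_pos) + rng.normal(0.0, 0.1, 50)

    # the initial parameters are only a starting point; the model is
    # re-fitted to the empirical variogram of the conditioning data
    model = gs.Exponential(dim=1, var=1.0, len_scale=1.0)
    krig = gs.krige.Ordinary(model, cond_pos, cond_val, fit_variogram=True)
    print(krig.model)  # fitted parameters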
- Default: False - """ - # only use existing external drift, if no new positions are given - ext_drift = ( - self._cond_ext_drift - if (ext_drift is None and cond_pos is None) - else ext_drift - ) - # use existing values or set default - cond_pos = self._cond_pos if cond_pos is None else cond_pos - cond_val = self._cond_val if cond_val is None else cond_val - cond_err = self._cond_err if cond_err is None else cond_err - cond_err = "nugget" if cond_err is None else cond_err # default - if cond_pos is None or cond_val is None: - raise ValueError("Krige.set_condition: missing cond_pos/cond_val.") - # correctly format cond_pos and cond_val - self._cond_pos, self._cond_val = set_condition( - cond_pos, cond_val, self.dim - ) - if fit_normalizer: # fit normalizer to detrended data - self.normalizer.fit(self.cond_val - self.cond_trend) - if fit_variogram: # fitting model to empirical variogram of data - # normalize field - if self.model.latlon and self.model.temporal: - msg = "Krige: can't fit variogram for spatio-temporal latlon data." - raise ValueError(msg) - field = self.normalizer.normalize(self.cond_val - self.cond_trend) - field -= self.cond_mean - sill = np.var(field) - if self.model.is_isotropic: - emp_vario = vario_estimate( - self.cond_pos, - field, - latlon=self.model.latlon, - geo_scale=self.model.geo_scale, - ) - else: - axes = rotated_main_axes(self.model.dim, self.model.angles) - emp_vario = vario_estimate( - self.cond_pos, field, direction=axes - ) - # set the sill to the field variance - self.model.fit_variogram(*emp_vario, sill=sill) - # set the measurement errors - self.cond_err = cond_err - # set the external drift values and the conditioning points - self._cond_ext_drift = self._pre_ext_drift( - self.cond_no, ext_drift, set_cond=True - ) - # upate the internal kriging settings - self._krige_pos = self.model.isometrize(self.cond_pos) - # krige pos are the unrotated and isotropic condition positions - self._krige_mat = self._get_krige_mat() - - def set_drift_functions(self, drift_functions=None): - """ - Set the drift functions for universal kriging. - - Parameters - ---------- - drift_functions : :class:`list` of :any:`callable`, :class:`str` or :class:`int` - Either a list of callable functions, an integer representing - the polynomial order of the drift or one of the following strings: - - * "linear" : regional linear drift (equals order=1) - * "quadratic" : regional quadratic drift (equals order=2) - - Raises - ------ - ValueError - If the given drift functions are not callable. 
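A sketch of passing custom drift callables (signature ``f(x, [y, z, ...])``) and of updating an existing kriging setup via ``set_condition``; `gs.Gaussian`, the `gs.krige` namespace and the synthetic data are assumptions from the public GSTools API::

    import numpy as np
    import gstools as gs

    rng = np.random.RandomState(42)
    cond_pos = rng.uniform(0.0, 10.0, 25)
    cond_val = 0.5 * cond_pos + rng.normal(0.0, 0.2, 25)
    model = gs.Gaussian(dim=1, var=0.3, len_scale=2.0)

    # universal kriging with an explicit list of drift callables
    krig = gs.krige.Universal(model, cond_pos, cond_val, drift_functions=[lambda x: x])

    # later: append a conditioning point and rebuild the kriging system
    krig.set_condition(
        cond_pos=np.append(cond_pos, 12.0),
        cond_val=np.append(cond_val, 6.1),
    )
    field, krige_var = krig(np.linspace(0.0, 14.0, 141))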
- """ - if drift_functions is None: - self._drift_functions = [] - elif isinstance(drift_functions, (str, int)): - self._drift_functions = get_drift_functions( - self.dim, drift_functions - ) - else: - if isinstance(drift_functions, collections.abc.Iterator): - drift_functions = list(drift_functions) - # check for a single content thats not a string - try: - iter(drift_functions) - except TypeError: - drift_functions = [drift_functions] - for f in drift_functions: - if not callable(f): - raise ValueError("Krige: Drift functions not callable") - self._drift_functions = drift_functions - - @property - def _krige_cond(self): - """:class:`numpy.ndarray`: The prepared kriging conditions.""" - pad_size = self.drift_no + int(self.unbiased) - # detrend data and normalize - val = self.normalizer.normalize(self.cond_val - self.cond_trend) - # set to zero mean - val -= self.cond_mean - return np.pad(val, (0, pad_size), mode="constant", constant_values=0) - - @property - def cond_pos(self): - """:class:`list`: The position tuple of the conditions.""" - return self._cond_pos - - @property - def cond_val(self): - """:class:`list`: The values of the conditions.""" - return self._cond_val - - @property - def cond_err(self): - """:class:`list`: The measurement errors at the condition points.""" - if isinstance(self._cond_err, str) and self._cond_err == "nugget": - return self.model.nugget - return self._cond_err - - @cond_err.setter - def cond_err(self, value): - if isinstance(value, str) and value == "nugget": - self._cond_err = value - else: - if self.exact: - raise ValueError( - "krige.cond_err: measurement errors can't be given, " - "when interpolator should be exact." - ) - value = np.asarray(value, dtype=np.double).reshape(-1) - if value.size == 1: - self._cond_err = value.item() - else: - if value.size != self.cond_no: - raise ValueError( - "krige.cond_err: wrong number of measurement errors." - ) - self._cond_err = value - - @property - def cond_no(self): - """:class:`int`: The number of the conditions.""" - return len(self._cond_val) - - @property - def cond_ext_drift(self): - """:class:`numpy.ndarray`: The ext. 
drift at the conditions.""" - return self._cond_ext_drift - - @property - def cond_mean(self): - """:class:`numpy.ndarray`: Trend at the conditions.""" - return eval_func(self.mean, self.cond_pos, self.dim, broadcast=True) - - @property - def cond_trend(self): - """:class:`numpy.ndarray`: Trend at the conditions.""" - return eval_func(self.trend, self.cond_pos, self.dim, broadcast=True) - - @property - def unbiased(self): - """:class:`bool`: Whether the kriging is unbiased or not.""" - return self._unbiased - - @property - def exact(self): - """:class:`bool`: Whether the interpolator is exact.""" - return self._exact - - @property - def pseudo_inv(self): - """:class:`bool`: Whether pseudo inverse matrix is used.""" - return self._pseudo_inv - - @property - def pseudo_inv_type(self): - """:class:`str`: Method selector for pseudo inverse calculation.""" - return self._pseudo_inv_type - - @pseudo_inv_type.setter - def pseudo_inv_type(self, val): - if val not in P_INV and not callable(val): - raise ValueError(f"Krige: pseudo_inv_type not in {sorted(P_INV)}") - self._pseudo_inv_type = val - - @property - def drift_functions(self): - """:class:`list` of :any:`callable`: The drift functions.""" - return self._drift_functions - - @property - def has_const_mean(self): - """:class:`bool`: Whether the field has a constant mean or not.""" - return self.drift_no == 0 and not callable(self.mean) - - @property - def krige_size(self): - """:class:`int`: Size of the kriging system.""" - return self.cond_no + self.drift_no + int(self.unbiased) - - @property - def drift_no(self): - """:class:`int`: Number of drift values per point.""" - return self.int_drift_no + self.ext_drift_no - - @property - def int_drift_no(self): - """:class:`int`: Number of internal drift values per point.""" - return len(self.drift_functions) - - @property - def ext_drift_no(self): - """:class:`int`: Number of external drift values per point.""" - return self.cond_ext_drift.shape[0] - - def __repr__(self): - """Return String representation.""" - return ( - f"{self.name}(model={self.model.name}, " - f"cond_no={self.cond_no}{self._fmt_mean_norm_trend()})" - ) diff --git a/src/gstools_cython/krige/methods.py b/src/gstools_cython/krige/methods.py deleted file mode 100644 index 19ffed56..00000000 --- a/src/gstools_cython/krige/methods.py +++ /dev/null @@ -1,520 +0,0 @@ -""" -GStools subpackage providing a class for simple kriging. - -.. currentmodule:: gstools.krige.methods - -The following classes are provided - -.. autosummary:: - Simple - Ordinary - Universal - ExtDrift - Detrended -""" - -# pylint: disable=C0103 -from gstools.krige.base import Krige - -__all__ = ["Simple", "Ordinary", "Universal", "ExtDrift", "Detrended"] - - -class Simple(Krige): - """ - Simple kriging. - - Simple kriging is used to interpolate data with a given mean. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. - cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - mean : :class:`float`, optional - mean value used to shift normalized conditioning data. - Could also be a callable. The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. 
Should have the signature: f(x, [y, z, ...]) - This is used for detrended kriging, where the trended is subtracted - from the conditions before kriging is applied. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. - Default: False - """ - - def __init__( - self, - model, - cond_pos, - cond_val, - mean=0.0, - normalizer=None, - trend=None, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_normalizer=False, - fit_variogram=False, - ): - super().__init__( - model, - cond_pos, - cond_val, - mean=mean, - normalizer=normalizer, - trend=trend, - unbiased=False, - exact=exact, - cond_err=cond_err, - pseudo_inv=pseudo_inv, - pseudo_inv_type=pseudo_inv_type, - fit_normalizer=fit_normalizer, - fit_variogram=fit_variogram, - ) - - -class Ordinary(Krige): - """ - Ordinary kriging. - - Ordinary kriging is used to interpolate data and estimate a proper mean. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. - cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. 
Should have the signature: f(x, [y, z, ...]) - This is used for detrended kriging, where the trended is subtracted - from the conditions before kriging is applied. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. - Default: False - """ - - def __init__( - self, - model, - cond_pos, - cond_val, - normalizer=None, - trend=None, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_normalizer=False, - fit_variogram=False, - ): - super().__init__( - model, - cond_pos, - cond_val, - trend=trend, - normalizer=normalizer, - exact=exact, - cond_err=cond_err, - pseudo_inv=pseudo_inv, - pseudo_inv_type=pseudo_inv_type, - fit_normalizer=fit_normalizer, - fit_variogram=fit_variogram, - ) - - -class Universal(Krige): - """ - Universal kriging. - - Universal kriging is used to interpolate given data with a variable mean, - that is determined by a functional drift. - - This estimator is set to be unbiased by default. - This means, that the weights in the kriging equation sum up to 1. - Consequently no constant function needs to be given for a constant drift, - since the unbiased condition is applied to all given drift functions. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. 
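To contrast the two classes above before the remaining parameters of Universal kriging continue below, a minimal sketch (the `gs.Gaussian` model and the `gs.krige` namespace are assumed from the public GSTools API)::

    import numpy as np
    import gstools as gs

    cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7]
    cond_val = [0.47, 0.56, 0.74, 1.47, 1.74]
    model = gs.Gaussian(dim=1, var=0.5, len_scale=2.0)
    gridx = np.linspace(0.0, 15.0, 151)

    # Simple kriging: the mean has to be supplied (unbiased=False internally)
    sk_field, sk_var = gs.krige.Simple(model, cond_pos, cond_val, mean=1.0)(gridx)

    # Ordinary kriging: the mean is estimated via the unbiasedness constraint
    ok_field, ok_var = gs.krige.Ordinary(model, cond_pos, cond_val)(gridx)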
- cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - drift_functions : :class:`list` of :any:`callable`, :class:`str` or :class:`int` - Either a list of callable functions, an integer representing - the polynomial order of the drift or one of the following strings: - - * "linear" : regional linear drift (equals order=1) - * "quadratic" : regional quadratic drift (equals order=2) - - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. Should have the signature: f(x, [y, z, ...]) - This is used for detrended kriging, where the trended is subtracted - from the conditions before kriging is applied. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. 
- Default: False - """ - - def __init__( - self, - model, - cond_pos, - cond_val, - drift_functions, - normalizer=None, - trend=None, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_normalizer=False, - fit_variogram=False, - ): - super().__init__( - model, - cond_pos, - cond_val, - drift_functions=drift_functions, - normalizer=normalizer, - trend=trend, - exact=exact, - cond_err=cond_err, - pseudo_inv=pseudo_inv, - pseudo_inv_type=pseudo_inv_type, - fit_normalizer=fit_normalizer, - fit_variogram=fit_variogram, - ) - - -class ExtDrift(Krige): - """ - External drift kriging (EDK). - - External drift kriging is used to interpolate given data - with a variable mean, that is determined by an external drift. - - This estimator is set to be unbiased by default. - This means, that the weights in the kriging equation sum up to 1. - Consequently no constant external drift needs to be given to estimate - a proper mean. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. - cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - ext_drift : :class:`numpy.ndarray` - the external drift values at the given condition positions. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. Should have the signature: f(x, [y, z, ...]) - This is used for detrended kriging, where the trended is subtracted - from the conditions before kriging is applied. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given conditioning data. - Default: False - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. 
- Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. - This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. - Default: False - """ - - def __init__( - self, - model, - cond_pos, - cond_val, - ext_drift, - normalizer=None, - trend=None, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_normalizer=False, - fit_variogram=False, - ): - super().__init__( - model, - cond_pos, - cond_val, - ext_drift=ext_drift, - normalizer=normalizer, - trend=trend, - exact=exact, - cond_err=cond_err, - pseudo_inv=pseudo_inv, - pseudo_inv_type=pseudo_inv_type, - fit_normalizer=fit_normalizer, - fit_variogram=fit_variogram, - ) - - -class Detrended(Krige): - """ - Detrended simple kriging. - - In detrended kriging, the data is detrended before interpolation by - simple kriging with zero mean. - - The trend needs to be a callable function the user has to provide. - This can be used for regression kriging, where the trend function - is determined by an external regression algorithm. - - This is just a shortcut for simple kriging with a given trend function, - zero mean and no normalizer. - - A trend can be given with EVERY provided kriging routine. - - Parameters - ---------- - model : :any:`CovModel` - Covariance Model used for kriging. - cond_pos : :class:`list` - tuple, containing the given condition positions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - trend_function : :any:`callable` - The callable trend function. Should have the signature: f(x, [y, z]) - exact : :class:`bool`, optional - Whether the interpolator should reproduce the exact input values. - If `False`, `cond_err` is interpreted as measurement error - at the conditioning points and the result will be more smooth. - Default: False - cond_err : :class:`str`, :class :class:`float` or :class:`list`, optional - The measurement error at the conditioning points. - Either "nugget" to apply the model-nugget, a single value applied to - all points or an array with individual values for each point. - The measurement error has to be <= nugget. - The "exact=True" variant only works with "cond_err='nugget'". - Default: "nugget" - pseudo_inv : :class:`bool`, optional - Whether the kriging system is solved with the pseudo inverted - kriging matrix. If `True`, this leads to more numerical stability - and redundant points are averaged. But it can take more time. - Default: True - pseudo_inv_type : :class:`str` or :any:`callable`, optional - Here you can select the algorithm to compute the pseudo-inverse matrix: - - * `"pinv"`: use `pinv` from `scipy` which uses `SVD` - * `"pinvh"`: use `pinvh` from `scipy` which uses eigen-values - - If you want to use another routine to invert the kriging matrix, - you can pass a callable which takes a matrix and returns the inverse. - Default: `"pinv"` - fit_variogram : :class:`bool`, optional - Whether to fit the given variogram model to the data. - Directional variogram fitting is triggered by setting - any anisotropy factor of the model to anything unequal 1 - but the main axes of correlation are taken from the model - rotation angles. If the model is a spatio-temporal latlon - model, this will raise an error. 
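A minimal sketch of the detrended / regression-kriging workflow described above, assuming the public ``gstools`` API; the conditioning values and the linear trend function are illustrative stand-ins for real data and an external regression model::

    import numpy as np
    import gstools as gs

    cond_pos = np.array([0.3, 1.1, 1.9, 3.3, 4.7])
    cond_val = np.array([0.47, 0.74, 0.56, 1.47, 1.74])

    def trend(x):
        # hypothetical trend, e.g. provided by an external regression
        return 0.3 * x

    model = gs.Gaussian(dim=1, var=0.5, len_scale=2.0)
    krig = gs.krige.Detrended(model, cond_pos, cond_val, trend)
    gridx = np.linspace(0.0, 5.0, 101)
    krig(gridx)
    # estimate and kriging variance, stored on the instance after the call
    field, krige_var = krig.field, krig.krige_var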
- This assumes the sill to be the data variance and with - standard bins provided by the :any:`standard_bins` routine. - Default: False - """ - - def __init__( - self, - model, - cond_pos, - cond_val, - trend, - exact=False, - cond_err="nugget", - pseudo_inv=True, - pseudo_inv_type="pinv", - fit_variogram=False, - ): - super().__init__( - model, - cond_pos, - cond_val, - trend=trend, - unbiased=False, - exact=exact, - cond_err=cond_err, - pseudo_inv=pseudo_inv, - pseudo_inv_type=pseudo_inv_type, - fit_variogram=fit_variogram, - ) diff --git a/src/gstools_cython/krige/tools.py b/src/gstools_cython/krige/tools.py deleted file mode 100644 index 62926595..00000000 --- a/src/gstools_cython/krige/tools.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -GStools subpackage providing tools for Kriging. - -.. currentmodule:: gstools.krige.tools - -The following classes and functions are provided - -.. autosummary:: - set_condition - get_drift_functions -""" - -# pylint: disable=C0103 -from itertools import combinations_with_replacement - -import numpy as np - -__all__ = ["set_condition", "get_drift_functions"] - - -def set_condition(cond_pos, cond_val, dim): - """ - Set the conditions for kriging. - - Parameters - ---------- - cond_pos : :class:`list` - the position tuple of the conditions (x, [y, z]) - cond_val : :class:`numpy.ndarray` - the values of the conditions (nan values will be ignored) - dim : :class:`int`, optional - Spatial dimension - - Raises - ------ - ValueError - If the given data does not match the given dimension. - - Returns - ------- - cond_pos : :class:`list` - the error checked cond_pos with all finite values - cond_val : :class:`numpy.ndarray` - the error checked cond_val for all finite cond_pos values - """ - # convert the input for right shapes and dimension checks - cond_val = np.asarray(cond_val, dtype=np.double).reshape(-1) - cond_pos = np.asarray(cond_pos, dtype=np.double).reshape(dim, -1) - if len(cond_pos[0]) != len(cond_val): - raise ValueError( - "Please check your 'cond_pos' and 'cond_val' parameters. " - "The shapes do not match." - ) - mask = np.isfinite(cond_val) - return cond_pos[:, mask], cond_val[mask] - - -def get_drift_functions(dim, drift_type): - """ - Get functions for a given drift type in universal kriging. - - Parameters - ---------- - dim : :class:`int` - Given dimension. - drift_type : :class:`str` or :class:`int` - Drift type: 'linear' or 'quadratic' or an integer for the polynomial - order of the drift type. (linear equals 1, quadratic equals 2 ...) - - Returns - ------- - :class:`list` of :any:`callable` - List of drift functions. - """ - if drift_type in ["lin", "linear"]: - drift_type = 1 - elif drift_type in ["quad", "quadratic"]: - drift_type = 2 - else: - drift_type = int(drift_type) - drift_functions = [] - for d in range(drift_type): - selects = combinations_with_replacement(range(dim), d + 1) - for select in selects: - drift_functions.append(_f_factory(select)) - return drift_functions - - -def _f_factory(select): - def f(*pos): - res = 1.0 - for i in select: - res *= np.asarray(pos[i]) - return res - - return f diff --git a/src/gstools_cython/normalizer/__init__.py b/src/gstools_cython/normalizer/__init__.py deleted file mode 100644 index 505a6d67..00000000 --- a/src/gstools_cython/normalizer/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -""" -GStools subpackage providing normalization routines. - -.. currentmodule:: gstools.normalizer - -Base-Normalizer -^^^^^^^^^^^^^^^ - -.. 
autosummary:: - :toctree: - - Normalizer - -Field-Normalizer -^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - LogNormal - BoxCox - BoxCoxShift - YeoJohnson - Modulus - Manly - -Convenience Routines -^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - apply_mean_norm_trend - remove_trend_norm_mean -""" - -from gstools.normalizer.base import Normalizer -from gstools.normalizer.methods import ( - BoxCox, - BoxCoxShift, - LogNormal, - Manly, - Modulus, - YeoJohnson, -) -from gstools.normalizer.tools import ( - apply_mean_norm_trend, - remove_trend_norm_mean, -) - -__all__ = [ - "Normalizer", - "LogNormal", - "BoxCox", - "BoxCoxShift", - "YeoJohnson", - "Modulus", - "Manly", - "apply_mean_norm_trend", - "remove_trend_norm_mean", -] diff --git a/src/gstools_cython/normalizer/base.py b/src/gstools_cython/normalizer/base.py deleted file mode 100644 index 4a8477c6..00000000 --- a/src/gstools_cython/normalizer/base.py +++ /dev/null @@ -1,260 +0,0 @@ -""" -GStools subpackage providing the base class for normalizers. - -.. currentmodule:: gstools.normalizer.base - -The following classes are provided - -.. autosummary:: - Normalizer -""" - -# pylint: disable=R0201 -import warnings - -import numpy as np -import scipy.misc as spm -import scipy.optimize as spo - - -class Normalizer: - """Normalizer class. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation to in order to gain normality. - The default is None. - **parameter - Specified parameters given by name. If not given, default parameters - will be used. - """ - - default_parameter = {} - """:class:`dict`: Default parameters of the Normalizer.""" - normalize_range = (-np.inf, np.inf) - """:class:`tuple`: Valid range for input data.""" - denormalize_range = (-np.inf, np.inf) - """:class:`tuple`: Valid range for output/normal data.""" - _dx = 1e-6 # dx for numerical derivative - - def __init__(self, data=None, **parameter): - # only use parameter, that have a provided default value - for key, value in self.default_parameter.items(): - setattr(self, key, parameter.get(key, value)) - # fit parameters if data is given - if data is not None: - self.fit(data) - # optimization results - self._opti = None - # precision for printing - self._prec = 3 - - def _denormalize(self, data): - return data - - def _normalize(self, data): - return data - - def _derivative(self, data): - return spm.derivative(self._normalize, data, dx=self._dx) - - def _loglikelihood(self, data): - add = -0.5 * np.size(data) * (np.log(2 * np.pi) + 1) - return self._kernel_loglikelihood(data) + add - - def _kernel_loglikelihood(self, data): - res = -0.5 * np.size(data) * np.log(np.var(self._normalize(data))) - return res + np.sum(np.log(np.maximum(1e-16, self._derivative(data)))) - - def _check_input(self, data, data_range=None, return_output_template=True): - is_data = np.logical_not(np.isnan(data)) - if return_output_template: - out = np.full_like(data, np.nan, dtype=np.double) - data = np.asarray(data, dtype=np.double)[is_data] - if data_range is not None and np.min(np.abs(data_range)) < np.inf: - dat_in = np.logical_and(data > data_range[0], data < data_range[1]) - if not np.all(dat_in): - warnings.warn( - f"{self.name}: " - f"data (min: {np.min(data)}, max: {np.max(data)}) " - f"out of range: {data_range}. " - "Affected values will be treated as NaN." 
- ) - is_data[is_data] &= dat_in - data = data[dat_in] - if return_output_template: - return data, is_data, out - return data - - def denormalize(self, data): - """Transform to input distribution. - - Parameters - ---------- - data : array_like - Input data (normal distributed). - - Returns - ------- - :class:`numpy.ndarray` - Denormalized data. - """ - data, is_data, out = self._check_input(data, self.denormalize_range) - out[is_data] = self._denormalize(data) - return out - - def normalize(self, data): - """Transform to normal distribution. - - Parameters - ---------- - data : array_like - Input data (not normal distributed). - - Returns - ------- - :class:`numpy.ndarray` - Normalized data. - """ - data, is_data, out = self._check_input(data, self.normalize_range) - out[is_data] = self._normalize(data) - return out - - def derivative(self, data): - """Factor for normal PDF to gain target PDF. - - Parameters - ---------- - data : array_like - Input data (not normal distributed). - - Returns - ------- - :class:`numpy.ndarray` - Derivative of the normalization transformation function. - """ - data, is_data, out = self._check_input(data, self.normalize_range) - out[is_data] = self._derivative(data) - return out - - def likelihood(self, data): - """Likelihood for given data with current parameters. - - Parameters - ---------- - data : array_like - Input data to fit the transformation to in order to gain normality. - - Returns - ------- - :class:`float` - Likelihood of the given data. - """ - return np.exp(self.loglikelihood(data)) - - def loglikelihood(self, data): - """Log-Likelihood for given data with current parameters. - - Parameters - ---------- - data : array_like - Input data to fit the transformation to in order to gain normality. - - Returns - ------- - :class:`float` - Log-Likelihood of the given data. - """ - data = self._check_input(data, self.normalize_range, False) - return self._loglikelihood(data) - - def kernel_loglikelihood(self, data): - """Kernel Log-Likelihood for given data with current parameters. - - Parameters - ---------- - data : array_like - Input data to fit the transformation to in order to gain normality. - - Returns - ------- - :class:`float` - Kernel Log-Likelihood of the given data. - - Notes - ----- - This loglikelihood function is neglecting additive constants, - that are not needed for optimization. - """ - data = self._check_input(data, self.normalize_range, False) - return self._kernel_loglikelihood(data) - - def fit(self, data, skip=None, **kwargs): - """Fitting the transformation to data by maximizing Log-Likelihood. - - Parameters - ---------- - data : array_like - Input data to fit the transformation to in order to gain normality. - skip : :class:`list` of :class:`str` or :any:`None`, optional - Names of parameters to be skipped in fitting. - The default is None. - **kwargs - Keyword arguments passed to :any:`scipy.optimize.minimize_scalar` - when only one parameter present or :any:`scipy.optimize.minimize`. - - Returns - ------- - :class:`dict` - Optimal parameters given by names. - """ - skip = [] if skip is None else skip - all_names = sorted(self.default_parameter) - para_names = [name for name in all_names if name not in skip] - - def _neg_kllf(par, dat): - for name, val in zip(para_names, np.atleast_1d(par)): - setattr(self, name, val) - return -self.kernel_loglikelihood(dat) - - if len(para_names) == 0: # transformations without para. (no opti.) 
- warnings.warn(f"{self.name}.fit: no parameters!") - return {} - if len(para_names) == 1: # one-para. transformations (simple opti.) - # default bracket like in scipy's boxcox (if not given) - kwargs.setdefault("bracket", (-2, 2)) - out = spo.minimize_scalar(_neg_kllf, args=(data,), **kwargs) - else: # general case - # init guess from current parameters (if x0 not given) - kwargs.setdefault("x0", [getattr(self, p) for p in para_names]) - out = spo.minimize(_neg_kllf, args=(data,), **kwargs) - # save optimization results - self._opti = out - for name, val in zip(para_names, np.atleast_1d(out.x)): - setattr(self, name, val) - return {name: getattr(self, name) for name in all_names} - - def __eq__(self, other): - """Compare Normalizers.""" - # check for correct base class - if type(self) is not type(other): - return False - # if base class is same, this is safe - for val in self.default_parameter: - if not np.isclose(getattr(self, val), getattr(other, val)): - return False - return True - - @property - def name(self): - """:class:`str`: The name of the normalizer class.""" - return self.__class__.__name__ - - def __repr__(self): - """Return String representation.""" - para_strs = [ - f"{p}={float(getattr(self, p)):.{self._prec}}" - for p in sorted(self.default_parameter) - ] - return f"{self.name}({', '.join(para_strs)})" diff --git a/src/gstools_cython/normalizer/methods.py b/src/gstools_cython/normalizer/methods.py deleted file mode 100644 index a46dc230..00000000 --- a/src/gstools_cython/normalizer/methods.py +++ /dev/null @@ -1,363 +0,0 @@ -""" -GStools subpackage providing different normalizer transformations. - -.. currentmodule:: gstools.normalizer.methods - -The following classes are provided - -.. autosummary:: - LogNormal - BoxCox - BoxCoxShift - YeoJohnson - Modulus - Manly -""" - -# pylint: disable=E1101 -import numpy as np - -from gstools.normalizer.base import Normalizer - - -class LogNormal(Normalizer): - r"""Log-normal fields. - - Notes - ----- - This parameter-free transformation is given by: - - .. math:: - y=\log(x) - """ - - normalize_range = (0.0, np.inf) - """Valid range for input data.""" - - def _denormalize(self, data): - return np.exp(data) - - def _normalize(self, data): - return np.log(data) - - def _derivative(self, data): - return np.power(data, -1) - - -class BoxCox(Normalizer): - r"""Box-Cox (1964) transformed fields. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation in order to gain normality. - The default is None. - lmbda : :class:`float`, optional - Shape parameter. Default: 1 - - Notes - ----- - This transformation is given by [Box1964]_: - - .. math:: - y=\begin{cases} - \frac{x^{\lambda} - 1}{\lambda} & \lambda\neq 0 \\ - \log(x) & \lambda = 0 - \end{cases} - - References - ---------- - .. [Box1964] G.E.P. Box and D.R. Cox, - "An Analysis of Transformations", - Journal of the Royal Statistical Society B, 26, 211-252, (1964) - """ - - default_parameter = {"lmbda": 1} - """:class:`dict`: Default parameter of the BoxCox-Normalizer.""" - normalize_range = (0.0, np.inf) - """:class:`tuple`: Valid range for input data.""" - - @property - def denormalize_range(self): - """:class:`tuple`: Valid range for output data depending on lmbda. 
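To illustrate the base-class contract above: a subclass only has to provide ``default_parameter`` plus ``_normalize``/``_denormalize``. The class name and its parameter below are made up for illustration::

    import numpy as np
    from gstools.normalizer import Normalizer

    class Shift(Normalizer):
        """Toy normalizer that subtracts a constant shift (hypothetical)."""

        default_parameter = {"shift": 0.0}

        def _normalize(self, data):
            return np.asarray(data, dtype=np.double) - self.shift

        def _denormalize(self, data):
            return np.asarray(data, dtype=np.double) + self.shift

    norm = Shift(shift=2.0)
    normed = norm.normalize([1.0, 2.0, 3.0])    # -> [-1., 0., 1.]
    restored = norm.denormalize(normed)         # round trip -> [1., 2., 3.]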
- - `(-1/lmbda, inf)` or `(-inf, -1/lmbda)` - """ - if np.isclose(self.lmbda, 0): - return (-np.inf, np.inf) - if self.lmbda < 0: - return (-np.inf, -np.divide(1, self.lmbda)) - return (-np.divide(1, self.lmbda), np.inf) - - def _denormalize(self, data): - if np.isclose(self.lmbda, 0): - return np.exp(data) - return (1 + np.multiply(data, self.lmbda)) ** (1 / self.lmbda) - - def _normalize(self, data): - if np.isclose(self.lmbda, 0): - return np.log(data) - return (np.power(data, self.lmbda) - 1) / self.lmbda - - def _derivative(self, data): - return np.power(data, self.lmbda - 1) - - -class BoxCoxShift(Normalizer): - r"""Box-Cox (1964) transformed fields including shifting. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation in order to gain normality. - The default is None. - lmbda : :class:`float`, optional - Shape parameter. Default: 1 - shift : :class:`float`, optional - Shift parameter. Default: 0 - - Notes - ----- - This transformation is given by [Box1964]_: - - .. math:: - y=\begin{cases} - \frac{(x+s)^{\lambda} - 1}{\lambda} & \lambda\neq 0 \\ - \log(x+s) & \lambda = 0 - \end{cases} - - Fitting the shift parameter is rather hard. You should consider skipping - "shift" during fitting: - - >>> data = range(5) - >>> norm = BoxCoxShift(shift=0.5) - >>> norm.fit(data, skip=["shift"]) - {'shift': 0.5, 'lmbda': 0.6747515267420799} - - References - ---------- - .. [Box1964] G.E.P. Box and D.R. Cox, - "An Analysis of Transformations", - Journal of the Royal Statistical Society B, 26, 211-252, (1964) - """ - - default_parameter = {"shift": 0, "lmbda": 1} - """:class:`dict`: Default parameters of the BoxCoxShift-Normalizer.""" - - @property - def normalize_range(self): - """:class:`tuple`: Valid range for input data depending on shift. - - `(-shift, inf)` - """ - return (-self.shift, np.inf) - - @property - def denormalize_range(self): - """:class:`tuple`: Valid range for output data depending on lmbda. - - `(-1/lmbda, inf)` or `(-inf, -1/lmbda)` - """ - if np.isclose(self.lmbda, 0): - return (-np.inf, np.inf) - if self.lmbda < 0: - return (-np.inf, -np.divide(1, self.lmbda)) - return (-np.divide(1, self.lmbda), np.inf) - - def _denormalize(self, data): - if np.isclose(self.lmbda, 0): - return np.exp(data) - self.shift - return (1 + np.multiply(data, self.lmbda)) ** ( - 1 / self.lmbda - ) - self.shift - - def _normalize(self, data): - if np.isclose(self.lmbda, 0): - return np.log(np.add(data, self.shift)) - return (np.add(data, self.shift) ** self.lmbda - 1) / self.lmbda - - def _derivative(self, data): - return np.power(np.add(data, self.shift), self.lmbda - 1) - - -class YeoJohnson(Normalizer): - r"""Yeo-Johnson (2000) transformed fields. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation in order to gain normality. - The default is None. - lmbda : :class:`float`, optional - Shape parameter. Default: 1 - - Notes - ----- - This transformation is given by [Yeo2000]_: - - .. math:: - y=\begin{cases} - \frac{(x+1)^{\lambda} - 1}{\lambda} - & x\geq 0,\, \lambda\neq 0 \\ - \log(x+1) - & x\geq 0,\, \lambda = 0 \\ - -\frac{(|x|+1)^{2-\lambda} - 1}{2-\lambda} - & x<0,\, \lambda\neq 2 \\ - -\log(|x|+1) - & x<0,\, \lambda = 2 - \end{cases} - - - References - ---------- - .. [Yeo2000] I.K. Yeo and R.A. Johnson, - "A new family of power transformations to improve normality or - symmetry." Biometrika, 87(4), pp.954-959, (2000). 
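A small fitting sketch for these parametric normalizers (illustrative data; assumes the public ``gstools.normalizer`` API)::

    import numpy as np
    import gstools as gs

    # skewed sample data, purely illustrative
    data = np.random.RandomState(20170519).lognormal(sigma=0.5, size=1000)

    norm = gs.normalizer.BoxCox()
    params = norm.fit(data)          # maximizes the kernel log-likelihood
    normed = norm.normalize(data)    # close to Gaussian afterwards
    restored = norm.denormalize(normed)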
- """ - - default_parameter = {"lmbda": 1} - """:class:`dict`: Default parameter of the YeoJohnson-Normalizer.""" - - def _denormalize(self, data): - data = np.asanyarray(data) - res = np.zeros_like(data, dtype=np.double) - pos = data >= 0 - # when data >= 0 - if np.isclose(self.lmbda, 0): - res[pos] = np.expm1(data[pos]) - else: # self.lmbda != 0 - res[pos] = np.power(data[pos] * self.lmbda + 1, 1 / self.lmbda) - 1 - # when data < 0 - if np.isclose(self.lmbda, 2): - res[~pos] = -np.expm1(-data[~pos]) - else: # self.lmbda != 2 - res[~pos] = 1 - np.power( - -(2 - self.lmbda) * data[~pos] + 1, 1 / (2 - self.lmbda) - ) - return res - - def _normalize(self, data): - data = np.asanyarray(data) - res = np.zeros_like(data, dtype=np.double) - pos = data >= 0 - # when data >= 0 - if np.isclose(self.lmbda, 0): - res[pos] = np.log1p(data[pos]) - else: # self.lmbda != 0 - res[pos] = (np.power(data[pos] + 1, self.lmbda) - 1) / self.lmbda - # when data < 0 - if np.isclose(self.lmbda, 2): - res[~pos] = -np.log1p(-data[~pos]) - else: # self.lmbda != 2 - res[~pos] = -(np.power(-data[~pos] + 1, 2 - self.lmbda) - 1) / ( - 2 - self.lmbda - ) - return res - - def _derivative(self, data): - return (np.abs(data) + 1) ** (np.sign(data) * (self.lmbda - 1)) - - -class Modulus(Normalizer): - r"""Modulus or John-Draper (1980) transformed fields. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation in order to gain normality. - The default is None. - lmbda : :class:`float`, optional - Shape parameter. Default: 1 - - Notes - ----- - This transformation is given by [John1980]_: - - .. math:: - y=\begin{cases} - \mathrm{sgn}(x)\frac{(|x|+1)^{\lambda} - 1}{\lambda} & \lambda\neq 0 \\ - \mathrm{sgn}(x)\log(|x|+1) & \lambda = 0 - \end{cases} - - References - ---------- - .. [John1980] J. A. John, and N. R. Draper, - "An Alternative Family of Transformations." Journal - of the Royal Statistical Society C, 29.2, 190-197, (1980) - """ - - default_parameter = {"lmbda": 1} - """:class:`dict`: Default parameter of the Modulus-Normalizer.""" - - def _denormalize(self, data): - if np.isclose(self.lmbda, 0): - return np.sign(data) * np.expm1(np.abs(data)) - return np.sign(data) * ( - (1 + self.lmbda * np.abs(data)) ** (1 / self.lmbda) - 1 - ) - - def _normalize(self, data): - if np.isclose(self.lmbda, 0): - return np.sign(data) * np.log1p(np.abs(data)) - return ( - np.sign(data) * ((np.abs(data) + 1) ** self.lmbda - 1) / self.lmbda - ) - - def _derivative(self, data): - return np.power(np.abs(data) + 1, self.lmbda - 1) - - -class Manly(Normalizer): - r"""Manly (1971) transformed fields. - - Parameters - ---------- - data : array_like, optional - Input data to fit the transformation in order to gain normality. - The default is None. - lmbda : :class:`float`, optional - Shape parameter. Default: 1 - - Notes - ----- - This transformation is given by [Manly1976]_: - - .. math:: - y=\begin{cases} - \frac{\exp(\lambda x) - 1}{\lambda} & \lambda\neq 0 \\ - x & \lambda = 0 - \end{cases} - - References - ---------- - .. [Manly1976] B. F. J. Manly, "Exponential data transformations.", - Journal of the Royal Statistical Society D, 25.1, 37-42 (1976). - """ - - default_parameter = {"lmbda": 1} - """:class:`dict`: Default parameter of the Manly-Normalizer.""" - - @property - def denormalize_range(self): - """:class:`tuple`: Valid range for output data depending on lmbda. 
- - `(-1/lmbda, inf)` or `(-inf, -1/lmbda)` - """ - if np.isclose(self.lmbda, 0): - return (-np.inf, np.inf) - if self.lmbda < 0: - return (-np.inf, np.divide(1, self.lmbda)) - return (-np.divide(1, self.lmbda), np.inf) - - def _denormalize(self, data): - if np.isclose(self.lmbda, 0): - return data - return np.log1p(np.multiply(data, self.lmbda)) / self.lmbda - - def _normalize(self, data): - if np.isclose(self.lmbda, 0): - return data - return np.expm1(np.multiply(data, self.lmbda)) / self.lmbda - - def _derivative(self, data): - return np.exp(np.multiply(data, self.lmbda)) diff --git a/src/gstools_cython/normalizer/tools.py b/src/gstools_cython/normalizer/tools.py deleted file mode 100644 index 3e395d29..00000000 --- a/src/gstools_cython/normalizer/tools.py +++ /dev/null @@ -1,186 +0,0 @@ -""" -GStools subpackage providing tools for Normalizers. - -.. currentmodule:: gstools.normalizer.tools - -The following classes and functions are provided - -.. autosummary:: - apply_mean_norm_trend - remove_trend_norm_mean -""" - -import numpy as np - -from gstools.normalizer.base import Normalizer -from gstools.tools.geometric import ( - format_struct_pos_shape, - format_unstruct_pos_shape, -) -from gstools.tools.misc import eval_func - -__all__ = ["apply_mean_norm_trend", "remove_trend_norm_mean"] - - -def _check_normalizer(normalizer): - if isinstance(normalizer, type) and issubclass(normalizer, Normalizer): - normalizer = normalizer() - elif normalizer is None: - normalizer = Normalizer() - elif not isinstance(normalizer, Normalizer): - raise ValueError("Check: 'normalizer' not of type 'Normalizer'.") - return normalizer - - -def apply_mean_norm_trend( - pos, - field, - mean=None, - normalizer=None, - trend=None, - mesh_type="unstructured", - value_type="scalar", - check_shape=True, - stacked=False, -): - """ - Apply mean, de-normalization and trend to given field. - - Parameters - ---------- - pos : :any:`iterable` - Position tuple, containing main direction and transversal directions. - field : :class:`numpy.ndarray` or :class:`list` of :class:`numpy.ndarray` - The spatially distributed data. - You can pass a list of fields, that will be used simultaneously. - Then you need to set ``stacked=True``. - mean : :any:`None` or :class:`float` or :any:`callable`, optional - Mean of the field if wanted. Could also be a callable. - The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the field. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - Trend of the denormalized fields. If no normalizer is applied, - this behaves equal to 'mean'. - The default is None. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: 'unstructured' - value_type : :class:`str`, optional - Value type of the field. Either "scalar" or "vector". - The default is "scalar". - check_shape : :class:`bool`, optional - Whether to check pos and field shapes. The default is True. - stacked : :class:`bool`, optional - Whether the field is stacked or not. The default is False. - - Returns - ------- - field : :class:`numpy.ndarray` - The transformed field. 
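A brief sketch of this forward transformation (illustrative values; ``LogNormal`` and the trend are arbitrary choices)::

    import numpy as np
    from gstools import normalizer

    x = np.linspace(0.0, 10.0, 11)
    normal_field = np.random.RandomState(0).normal(size=x.size)

    # mean is added first, then the normalizer is inverted, then the trend is added
    field = normalizer.apply_mean_norm_trend(
        pos=(x,),
        field=normal_field,
        mean=1.0,
        normalizer=normalizer.LogNormal,
        trend=lambda pnt: 0.1 * pnt,
    )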
- """ - normalizer = _check_normalizer(normalizer) - if check_shape: - if mesh_type != "unstructured": - pos, shape, dim = format_struct_pos_shape( - pos, field.shape, check_stacked_shape=stacked - ) - else: - pos, shape, dim = format_unstruct_pos_shape( - pos, field.shape, check_stacked_shape=stacked - ) - field = np.asarray(field, dtype=np.double).reshape(shape) - else: - dim = len(pos) - if not stacked: - field = [field] - field_cnt = len(field) - for i in range(field_cnt): - field[i] += eval_func(mean, pos, dim, mesh_type, value_type, True) - field = normalizer.denormalize(field) - for i in range(field_cnt): - field[i] += eval_func(trend, pos, dim, mesh_type, value_type, True) - return field if stacked else field[0] - - -def remove_trend_norm_mean( - pos, - field, - mean=None, - normalizer=None, - trend=None, - mesh_type="unstructured", - value_type="scalar", - check_shape=True, - stacked=False, - fit_normalizer=False, -): - """ - Remove trend, de-normalization and mean from given field. - - Parameters - ---------- - pos : :any:`iterable` - Position tuple, containing main direction and transversal directions. - field : :class:`numpy.ndarray` or :class:`list` of :class:`numpy.ndarray` - The spatially distributed data. - You can pass a list of fields, that will be used simultaneously. - Then you need to set ``stacked=True``. - mean : :any:`None` or :class:`float` or :any:`callable`, optional - Mean of the field if wanted. Could also be a callable. - The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the field. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - Trend of the denormalized fields. If no normalizer is applied, - this behaves equal to 'mean'. - The default is None. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: 'unstructured' - value_type : :class:`str`, optional - Value type of the field. Either "scalar" or "vector". - The default is "scalar". - check_shape : :class:`bool`, optional - Whether to check pos and field shapes. The default is True. - stacked : :class:`bool`, optional - Whether the field is stacked or not. The default is False. - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given (detrended) field. - Default: False - - Returns - ------- - field : :class:`numpy.ndarray` - The cleaned field. - normalizer : :any:`Normalizer`, optional - The fitted normalizer for the given data. - Only provided if `fit_normalizer` is True. 
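And the inverse direction, here with an on-the-fly normalizer fit (a sketch mirroring the example above; values are illustrative)::

    import numpy as np
    from gstools import normalizer

    x = np.linspace(0.0, 10.0, 11)
    raw = np.exp(np.random.RandomState(1).normal(size=x.size)) + 0.1 * x + 1.0

    # trend is removed first, then the normalizer is fitted and applied,
    # then the mean is removed
    clean, fitted_norm = normalizer.remove_trend_norm_mean(
        pos=(x,),
        field=raw,
        mean=1.0,
        normalizer=normalizer.BoxCox,
        trend=lambda pnt: 0.1 * pnt,
        fit_normalizer=True,
    )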
- """ - normalizer = _check_normalizer(normalizer) - if check_shape: - if mesh_type != "unstructured": - pos, shape, dim = format_struct_pos_shape( - pos, field.shape, check_stacked_shape=stacked - ) - else: - pos, shape, dim = format_unstruct_pos_shape( - pos, field.shape, check_stacked_shape=stacked - ) - field = np.asarray(field, dtype=np.double).reshape(shape) - else: - dim = len(pos) - if not stacked: - field = [field] - field_cnt = len(field) - for i in range(field_cnt): - field[i] -= eval_func(trend, pos, dim, mesh_type, value_type, True) - if fit_normalizer: - normalizer.fit(field) - field = normalizer.normalize(field) - for i in range(field_cnt): - field[i] -= eval_func(mean, pos, dim, mesh_type, value_type, True) - out = field if stacked else field[0] - return (out, normalizer) if fit_normalizer else out diff --git a/src/gstools_cython/random/__init__.py b/src/gstools_cython/random/__init__.py deleted file mode 100644 index af8f7378..00000000 --- a/src/gstools_cython/random/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -GStools subpackage for random number generation. - -.. currentmodule:: gstools.random - -Random Number Generator -^^^^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - RNG - -Seed Generator -^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - MasterRNG - -Distribution factory -^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - dist_gen - ----- -""" - -from gstools.random.rng import RNG -from gstools.random.tools import MasterRNG, dist_gen - -__all__ = ["RNG", "MasterRNG", "dist_gen"] diff --git a/src/gstools_cython/random/rng.py b/src/gstools_cython/random/rng.py deleted file mode 100644 index ad07c6aa..00000000 --- a/src/gstools_cython/random/rng.py +++ /dev/null @@ -1,221 +0,0 @@ -""" -GStools subpackage providing the core of the spatial random field generation. - -.. currentmodule:: gstools.random.rng - -The following classes are provided - -.. autosummary:: - RNG -""" - -# pylint: disable=E1101 -import emcee as mc -import numpy as np -import numpy.random as rand -from emcee.state import State - -from gstools.random.tools import MasterRNG, dist_gen - -__all__ = ["RNG"] - - -class RNG: - """ - A random number generator for different distributions and multiple streams. - - Parameters - ---------- - seed : :class:`int` or :any:`None`, optional - The seed of the master RNG, if ``None``, - a random seed is used. Default: ``None`` - """ - - def __init__(self, seed=None): - # set seed - self._master_rng = None - self.seed = seed - - def sample_ln_pdf( - self, - ln_pdf, - size=None, - sample_around=1.0, - nwalkers=50, - burn_in=20, - oversampling_factor=10, - ): - """Sample from a distribution given by ln(pdf). - - This algorithm uses the :class:`emcee.EnsembleSampler` - - Parameters - ---------- - ln_pdf : :any:`callable` - The logarithm of the Probability density function - of the given distribution, that takes a single argument - size : :class:`int` or :any:`None`, optional - sample size. Default: None - sample_around : :class:`float`, optional - Starting point for initial guess Default: 1. - nwalkers : :class:`int`, optional - The number of walkers in the mcmc sampler. Used for the - emcee.EnsembleSampler class. - Default: 50 - burn_in : :class:`int`, optional - Number of burn-in runs in the mcmc algorithm. - Default: 20 - oversampling_factor : :class:`int`, optional - To guess the sample number needed for proper results, we use a - factor for oversampling. 
The intern used sample-size is - calculated by - - ``sample_size = max(burn_in, (size/nwalkers)*oversampling_factor)`` - - So at least, as much as the burn-in runs. - Default: 10 - """ - if size is None: # pragma: no cover - sample_size = burn_in - else: - sample_size = max(burn_in, (size / nwalkers) * oversampling_factor) - # sample_size needs to be integer for emcee >= 3.1 - sample_size = int(sample_size) - # initial guess - init_guess = ( - self.random.rand(nwalkers).reshape((nwalkers, 1)) * sample_around - ) - # initialize the sampler - sampler = mc.EnsembleSampler(nwalkers, 1, ln_pdf, vectorize=True) - # burn in phase with saving of last position - initial_state = State(init_guess, copy=True) - initial_state.random_state = self.random.get_state() - burn_in_state = sampler.run_mcmc( - initial_state=initial_state, nsteps=burn_in - ) - # reset after burn_in - sampler.reset() - # actual sampling - initial_state = State(burn_in_state, copy=True) - initial_state.random_state = self.random.get_state() - sampler.run_mcmc(initial_state=initial_state, nsteps=sample_size) - samples = sampler.get_chain(flat=True)[:, 0] - - # choose samples according to size - return self.random.choice(samples, size) - - def sample_dist(self, pdf=None, cdf=None, ppf=None, size=None, **kwargs): - """Sample from a distribution given by pdf, cdf and/or ppf. - - Parameters - ---------- - pdf : :any:`callable` or :any:`None`, optional - Probability density function of the given distribution, - that takes a single argument - Default: ``None`` - cdf : :any:`callable` or :any:`None`, optional - Cumulative distribution function of the given distribution, that - takes a single argument - Default: ``None`` - ppf : :any:`callable` or :any:`None`, optional - Percent point function of the given distribution, that - takes a single argument - Default: ``None`` - size : :class:`int` or :any:`None`, optional - sample size. Default: None - **kwargs - Keyword-arguments that are forwarded to - :any:`scipy.stats.rv_continuous`. - - Returns - ------- - samples : :class:`float` or :class:`numpy.ndarray` - the samples from the given distribution - - Notes - ----- - At least pdf or cdf needs to be given. - """ - kwargs["seed"] = self.random - dist = dist_gen(pdf_in=pdf, cdf_in=cdf, ppf_in=ppf, **kwargs) - return dist.rvs(size=size) - - def sample_sphere(self, dim, size=None): - """Uniform sampling on a d-dimensional sphere. - - Parameters - ---------- - dim : :class:`int` - Dimension of the sphere. Just 1, 2, and 3 supported. 
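For instance, drawing samples from a hand-made density through this factory (a sketch; the density and seed are illustrative)::

    from gstools.random import RNG

    def pdf(x):
        # normalized density f(x) = 2*x on the interval [0, 1]
        return 2.0 * x

    rng = RNG(seed=19841203)
    # "a" and "b" are forwarded to scipy.stats.rv_continuous as support bounds
    samples = rng.sample_dist(pdf=pdf, size=100, a=0.0, b=1.0)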
- size : :class:`int`, optional - sample size - - Returns - ------- - coord : :class:`numpy.ndarray` - x[, y[, z]] coordinates on the sphere with shape (dim, size) - """ - if size is None: # pragma: no cover - coord = np.empty((dim, 1), dtype=np.double) - else: - coord = np.empty( # saver conversion of size to resulting shape - (dim,) + tuple(np.atleast_1d(size)), dtype=np.double - ) - if dim == 1: - coord[0] = self.random.choice([-1, 1], size=size) - elif dim == 2: - ang1 = self.random.uniform(0.0, 2 * np.pi, size) - coord[0] = np.cos(ang1) - coord[1] = np.sin(ang1) - elif dim == 3: - ang1 = self.random.uniform(0.0, 2 * np.pi, size) - ang2 = self.random.uniform(-1.0, 1.0, size) - coord[0] = np.sqrt(1.0 - ang2**2) * np.cos(ang1) - coord[1] = np.sqrt(1.0 - ang2**2) * np.sin(ang1) - coord[2] = ang2 - else: # pragma: no cover - # http://corysimon.github.io/articles/uniformdistn-on-sphere/ - coord = self.random.normal(size=coord.shape) - while True: # loop until all norms are non-zero - norm = np.linalg.norm(coord, axis=0) - # check for zero norms - zero_norms = np.isclose(norm, 0) - # exit the loop if all norms are non-zero - if not np.any(zero_norms): - break - # transpose, since the next transpose reverses axis order - zero_samples = zero_norms.T.nonzero() - # need to transpose to have dim-axis last - new_shape = coord.T[zero_samples].shape - # resample the zero norm samples - coord.T[zero_samples] = self.random.normal(size=new_shape) - # project onto sphere - coord = coord / norm - return np.reshape(coord, dim) if size is None else coord - - @property - def random(self): - """:any:`numpy.random.RandomState`: Randomstate. - - Get a stream to the numpy Random number generator. - You can use this, to call any provided distribution - from :any:`numpy.random.RandomState`. - """ - return rand.RandomState(self._master_rng()) - - @property # pragma: no cover - def seed(self): - """:class:`int`: Seed of the master RNG. - - The setter property not only saves the new seed, but also creates - a new master RNG function with the new seed. - """ - return self._master_rng.seed - - @seed.setter - def seed(self, new_seed=None): - self._master_rng = MasterRNG(new_seed) - - def __repr__(self): - """Return String representation.""" - return f"RNG(seed={self.seed})" diff --git a/src/gstools_cython/random/tools.py b/src/gstools_cython/random/tools.py deleted file mode 100644 index d61327ea..00000000 --- a/src/gstools_cython/random/tools.py +++ /dev/null @@ -1,183 +0,0 @@ -""" -GStools subpackage providing tools for random sampling. - -.. currentmodule:: gstools.random.tools - -The following classes are provided - -.. autosummary:: - MasterRNG - dist_gen -""" - -import numpy.random as rand -from scipy.stats import rv_continuous - -__all__ = ["MasterRNG", "dist_gen"] - - -class MasterRNG: - """Master random number generator for generating seeds. - - Parameters - ---------- - seed : :class:`int` or :any:`None`, optional - The seed of the master RNG, if ``None``, - a random seed is used. Default: ``None`` - - """ - - def __init__(self, seed): - self._seed = seed - self._master_rng_fct = rand.RandomState(seed) - self._master_rng = lambda: self._master_rng_fct.randint(1, 2**16) - - def __call__(self): - """Return a random seed.""" - return self._master_rng() - - @property # pragma: no cover - def seed(self): - """:class:`int`: Seed of the master RNG. - - The setter property not only saves the new seed, but also creates - a new master RNG function with the new seed. 
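A short sketch of this seed-stream pattern (seed values are illustrative)::

    from gstools.random import RNG, MasterRNG

    master = MasterRNG(seed=20170519)
    child_seeds = [master() for _ in range(3)]      # reproducible seeds

    rng = RNG(seed=child_seeds[0])
    directions = rng.sample_sphere(dim=3, size=5)   # unit vectors, shape (3, 5)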
- """ - return self._seed - - def __repr__(self): - """Return String representation.""" - return f"MasterRNG(seed={self.seed})" - - -def dist_gen(pdf_in=None, cdf_in=None, ppf_in=None, **kwargs): - """Distribution Factory. - - Parameters - ---------- - pdf_in : :any:`callable` or :any:`None`, optional - Probability distribution function of the given distribution, that - takes a single argument - Default: ``None`` - cdf_in : :any:`callable` or :any:`None`, optional - Cumulative distribution function of the given distribution, that - takes a single argument - Default: ``None`` - ppf_in : :any:`callable` or :any:`None`, optional - Percent point function of the given distribution, that - takes a single argument - Default: ``None`` - **kwargs - Keyword-arguments forwarded to :any:`scipy.stats.rv_continuous`. - - Returns - ------- - dist : :class:`scipy.stats.rv_continuous` - The constructed distribution. - - Notes - ----- - At least pdf or cdf needs to be given. - """ - if ppf_in is None: - if pdf_in is not None and cdf_in is None: - return DistPdf(pdf_in, **kwargs) - if pdf_in is None and cdf_in is not None: - return DistCdf(cdf_in, **kwargs) - if pdf_in is not None and cdf_in is not None: - return DistPdfCdf(pdf_in, cdf_in, **kwargs) - raise ValueError("Either pdf or cdf must be given") - - if pdf_in is not None and cdf_in is None: - return DistPdfPpf(pdf_in, ppf_in, **kwargs) - if pdf_in is None and cdf_in is not None: - return DistCdfPpf(cdf_in, ppf_in, **kwargs) - if pdf_in is not None and cdf_in is not None: - return DistPdfCdfPpf(pdf_in, cdf_in, ppf_in, **kwargs) - raise ValueError("pdf or cdf must be given along with the ppf") - - -class DistPdf(rv_continuous): - """Generate distribution from pdf.""" - - def __init__(self, pdf_in, **kwargs): - self.pdf_in = pdf_in - super().__init__(**kwargs) - - def _pdf(self, x, *args): - return self.pdf_in(x) - - -class DistCdf(rv_continuous): - """Generate distribution from cdf.""" - - def __init__(self, cdf_in, **kwargs): - self.cdf_in = cdf_in - super().__init__(**kwargs) - - def _cdf(self, x, *args): - return self.cdf_in(x) - - -class DistPdfCdf(rv_continuous): - """Generate distribution from pdf and cdf.""" - - def __init__(self, pdf_in, cdf_in, **kwargs): - self.pdf_in = pdf_in - self.cdf_in = cdf_in - super().__init__(**kwargs) - - def _pdf(self, x, *args): - return self.pdf_in(x) - - def _cdf(self, x, *args): - return self.cdf_in(x) - - -class DistPdfPpf(rv_continuous): - """Generate distribution from pdf and ppf.""" - - def __init__(self, pdf_in, ppf_in, **kwargs): - self.pdf_in = pdf_in - self.ppf_in = ppf_in - super().__init__(**kwargs) - - def _pdf(self, x, *args): - return self.pdf_in(x) - - def _ppf(self, q, *args): - return self.ppf_in(q) - - -class DistCdfPpf(rv_continuous): - """Generate distribution from cdf and ppf.""" - - def __init__(self, cdf_in, ppf_in, **kwargs): - self.cdf_in = cdf_in - self.ppf_in = ppf_in - super().__init__(**kwargs) - - def _cdf(self, x, *args): - return self.cdf_in(x) - - def _ppf(self, q, *args): - return self.ppf_in(q) - - -class DistPdfCdfPpf(rv_continuous): - """Generate distribution from pdf, cdf and ppf.""" - - def __init__(self, pdf_in, cdf_in, ppf_in, **kwargs): - self.pdf_in = pdf_in - self.cdf_in = cdf_in - self.ppf_in = ppf_in - super().__init__(**kwargs) - - def _pdf(self, x, *args): - return self.pdf_in(x) - - def _cdf(self, x, *args): - return self.cdf_in(x) - - def _ppf(self, q, *args): - return self.ppf_in(q) diff --git a/src/gstools_cython/tools/__init__.py 
b/src/gstools_cython/tools/__init__.py deleted file mode 100644 index 1f68dbaf..00000000 --- a/src/gstools_cython/tools/__init__.py +++ /dev/null @@ -1,159 +0,0 @@ -""" -GStools subpackage providing miscellaneous tools. - -.. currentmodule:: gstools.tools - -Export -^^^^^^ - -.. autosummary:: - :toctree: - - vtk_export - vtk_export_structured - vtk_export_unstructured - to_vtk - to_vtk_structured - to_vtk_unstructured - -Special functions -^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - confidence_scaling - inc_gamma - inc_gamma_low - exp_int - inc_beta - tplstable_cor - tpl_exp_spec_dens - tpl_gau_spec_dens - -Geometric -^^^^^^^^^ - -.. autosummary:: - :toctree: - - rotated_main_axes - set_angles - set_anis - no_of_angles - rotation_planes - givens_rotation - matrix_rotate - matrix_derotate - matrix_isotropify - matrix_anisotropify - matrix_isometrize - matrix_anisometrize - ang2dir - generate_grid - generate_st_grid - -Misc -^^^^ - -.. autosummary:: - EARTH_RADIUS - KM_SCALE - DEGREE_SCALE - RADIAN_SCALE - ----- - -.. autodata:: EARTH_RADIUS - -.. autodata:: KM_SCALE - -.. autodata:: DEGREE_SCALE - -.. autodata:: RADIAN_SCALE -""" - -from gstools.tools.export import ( - to_vtk, - to_vtk_structured, - to_vtk_unstructured, - vtk_export, - vtk_export_structured, - vtk_export_unstructured, -) -from gstools.tools.geometric import ( - ang2dir, - generate_grid, - generate_st_grid, - givens_rotation, - matrix_anisometrize, - matrix_anisotropify, - matrix_derotate, - matrix_isometrize, - matrix_isotropify, - matrix_rotate, - no_of_angles, - rotated_main_axes, - rotation_planes, - set_angles, - set_anis, -) -from gstools.tools.special import ( - confidence_scaling, - exp_int, - inc_beta, - inc_gamma, - inc_gamma_low, - tpl_exp_spec_dens, - tpl_gau_spec_dens, - tplstable_cor, -) - -EARTH_RADIUS = 6371.0 -"""float: earth radius for WGS84 ellipsoid in km""" - -KM_SCALE = 6371.0 -"""float: earth radius for WGS84 ellipsoid in km""" - -DEGREE_SCALE = 57.29577951308232 -"""float: radius for unit sphere in degree""" - -RADIAN_SCALE = 1.0 -"""float: radius for unit sphere""" - - -__all__ = [ - "vtk_export", - "vtk_export_structured", - "vtk_export_unstructured", - "to_vtk", - "to_vtk_structured", - "to_vtk_unstructured", - "confidence_scaling", - "inc_gamma", - "inc_gamma_low", - "exp_int", - "inc_beta", - "tplstable_cor", - "tpl_exp_spec_dens", - "tpl_gau_spec_dens", - "set_angles", - "set_anis", - "no_of_angles", - "rotation_planes", - "givens_rotation", - "matrix_rotate", - "matrix_derotate", - "matrix_isotropify", - "matrix_anisotropify", - "matrix_isometrize", - "matrix_anisometrize", - "rotated_main_axes", - "ang2dir", - "generate_grid", - "generate_st_grid", - "EARTH_RADIUS", - "KM_SCALE", - "DEGREE_SCALE", - "RADIAN_SCALE", -] diff --git a/src/gstools_cython/tools/export.py b/src/gstools_cython/tools/export.py deleted file mode 100644 index 38254ceb..00000000 --- a/src/gstools_cython/tools/export.py +++ /dev/null @@ -1,236 +0,0 @@ -""" -GStools subpackage providing export routines. - -.. currentmodule:: gstools.tools.export - -The following functions are provided - -.. 
autosummary:: - vtk_export - vtk_export_structured - vtk_export_unstructured - to_vtk - to_vtk_structured - to_vtk_unstructured -""" - -# pylint: disable=C0103, E1101 -import numpy as np -from pyevtk.hl import gridToVTK, pointsToVTK - -try: - import pyvista as pv -except ImportError: - pv = None - -__all__ = [ - "to_vtk_structured", - "vtk_export_structured", - "to_vtk_unstructured", - "vtk_export_unstructured", - "to_vtk", - "vtk_export", -] - - -# export routines ############################################################# - - -def _vtk_structured_helper(pos, fields): - """Extract field info for vtk rectilinear grid.""" - if not isinstance(fields, dict): - fields = {"field": fields} - if len(pos) > 3: - raise ValueError( - "gstools.vtk_export_structured: " - "vtk export only possible for dim=1,2,3" - ) - x = pos[0] - y = pos[1] if len(pos) > 1 else np.array([0]) - z = pos[2] if len(pos) > 2 else np.array([0]) - # need fortran order in VTK - for field in fields: - fields[field] = fields[field].reshape(-1, order="F") - if len(fields[field]) != len(x) * len(y) * len(z): - raise ValueError( - "gstools.vtk_export_structured: " - "field shape doesn't match the given mesh" - ) - return x, y, z, fields - - -def to_vtk_structured(pos, fields): # pragma: no cover - """Create a vtk structured rectilinear grid from a field. - - Parameters - ---------- - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - Structured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. - - Returns - ------- - :class:`pyvista.RectilinearGrid` - A PyVista rectilinear grid of the structured field data. Data arrays - live on the point data of this PyVista dataset. - """ - x, y, z, fields = _vtk_structured_helper(pos=pos, fields=fields) - if pv is not None: - grid = pv.RectilinearGrid(x, y, z) - grid.point_data.update(fields) - else: - raise ImportError("Please install PyVista to create VTK datasets.") - return grid - - -def vtk_export_structured(filename, pos, fields): # pragma: no cover - """Export a field to vtk structured rectilinear grid file. - - Parameters - ---------- - filename : :class:`str` - Filename of the file to be saved, including the path. Note that an - ending (.vtr) will be added to the name. - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - Structured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. 
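A usage sketch for the structured export path (illustrative grid and model; writes ``field.vtr`` next to the script and needs the ``pyevtk`` package)::

    import numpy as np
    import gstools as gs
    from gstools.tools import vtk_export_structured

    x = y = np.arange(0.0, 50.0)
    srf = gs.SRF(gs.Gaussian(dim=2, var=1.0, len_scale=10.0), seed=19841203)
    field = srf.structured((x, y))

    vtk_export_structured("field", (x, y), {"srf": field})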
- """ - x, y, z, fields = _vtk_structured_helper(pos=pos, fields=fields) - return gridToVTK(filename, x, y, z, pointData=fields) - - -def _vtk_unstructured_helper(pos, fields): - if not isinstance(fields, dict): - fields = {"field": fields} - if len(pos) > 3: - raise ValueError( - "gstools.vtk_export_structured: " - "vtk export only possible for dim=1,2,3" - ) - x = pos[0] - y = pos[1] if len(pos) > 1 else np.zeros_like(x) - z = pos[2] if len(pos) > 2 else np.zeros_like(x) - for field in fields: - fields[field] = fields[field].reshape(-1) - if ( - len(fields[field]) != len(x) - or len(fields[field]) != len(y) - or len(fields[field]) != len(z) - ): - raise ValueError( - "gstools.vtk_export_unstructured: " - "field shape doesn't match the given mesh" - ) - return x, y, z, fields - - -def to_vtk_unstructured(pos, fields): # pragma: no cover - """Export a field to vtk structured rectilinear grid file. - - Parameters - ---------- - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - Unstructured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. - - Returns - ------- - :class:`pyvista.UnstructuredGrid` - A PyVista unstructured grid of the unstructured field data. Data arrays - live on the point data of this PyVista dataset. This is essentially - a point cloud with no topology. - """ - x, y, z, fields = _vtk_unstructured_helper(pos=pos, fields=fields) - if pv is not None: - grid = pv.PolyData(np.c_[x, y, z]).cast_to_unstructured_grid() - grid.point_data.update(fields) - else: - raise ImportError("Please install PyVista to create VTK datasets.") - return grid - - -def vtk_export_unstructured(filename, pos, fields): # pragma: no cover - """Export a field to vtk unstructured grid file. - - Parameters - ---------- - filename : :class:`str` - Filename of the file to be saved, including the path. Note that an - ending (.vtu) will be added to the name. - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - Unstructured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. - """ - x, y, z, fields = _vtk_unstructured_helper(pos=pos, fields=fields) - return pointsToVTK(filename, x, y, z, data=fields) - - -def to_vtk(pos, fields, mesh_type="unstructured"): # pragma: no cover - """Create a VTK/PyVista grid. - - Parameters - ---------- - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - [Un]structured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured'. Default: structured - - Returns - ------- - :class:`pyvista.RectilinearGrid` or :class:`pyvista.UnstructuredGrid` - This will return a PyVista object for the given field data in its - appropriate type. Structured meshes will return a - :class:`pyvista.RectilinearGrid` and unstructured meshes will return - an :class:`pyvista.UnstructuredGrid` object. 
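And the in-memory counterpart for unstructured data, which returns a PyVista dataset (a sketch; requires PyVista to be installed, positions and model are illustrative)::

    import numpy as np
    import gstools as gs
    from gstools.tools import to_vtk

    rng = np.random.RandomState(4)
    x, y = rng.uniform(0.0, 10.0, 60), rng.uniform(0.0, 10.0, 60)

    srf = gs.SRF(gs.Exponential(dim=2, var=2.0, len_scale=3.0), seed=4)
    field = srf((x, y))

    mesh = to_vtk((x, y), field)   # pyvista.UnstructuredGrid (point cloud)
    # mesh.plot()                  # interactive view, if desired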
- """ - if mesh_type != "unstructured": - grid = to_vtk_structured(pos=pos, fields=fields) - else: - grid = to_vtk_unstructured(pos=pos, fields=fields) - return grid - - -def vtk_export( - filename, pos, fields, mesh_type="unstructured" -): # pragma: no cover - """Export a field to vtk. - - Parameters - ---------- - filename : :class:`str` - Filename of the file to be saved, including the path. Note that an - ending (.vtr or .vtu) will be added to the name. - pos : :class:`list` - the position tuple, containing main direction and transversal - directions - fields : :class:`dict` or :class:`numpy.ndarray` - [Un]structured fields to be saved. - Either a single numpy array as returned by SRF, - or a dictionary of fields with theirs names as keys. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured'. Default: structured - """ - if mesh_type != "unstructured": - return vtk_export_structured(filename=filename, pos=pos, fields=fields) - return vtk_export_unstructured(filename=filename, pos=pos, fields=fields) diff --git a/src/gstools_cython/tools/geometric.py b/src/gstools_cython/tools/geometric.py deleted file mode 100644 index 55408965..00000000 --- a/src/gstools_cython/tools/geometric.py +++ /dev/null @@ -1,754 +0,0 @@ -""" -GStools subpackage providing geometric tools. - -.. currentmodule:: gstools.tools.geometric - -The following functions are provided - -.. autosummary:: - set_angles - set_anis - no_of_angles - rotation_planes - givens_rotation - matrix_rotate - matrix_derotate - matrix_isotropify - matrix_anisotropify - matrix_isometrize - matrix_anisometrize - rotated_main_axes - generate_grid - generate_st_grid - format_struct_pos_dim - format_struct_pos_shape - format_unstruct_pos_shape - ang2dir - latlon2pos - pos2latlon - chordal_to_great_circle - great_circle_to_chordal -""" - -# pylint: disable=C0103 -import numpy as np - -__all__ = [ - "set_angles", - "set_anis", - "no_of_angles", - "rotation_planes", - "givens_rotation", - "matrix_rotate", - "matrix_derotate", - "matrix_isotropify", - "matrix_anisotropify", - "matrix_isometrize", - "matrix_anisometrize", - "rotated_main_axes", - "generate_grid", - "generate_st_grid", - "format_struct_pos_dim", - "format_struct_pos_shape", - "format_unstruct_pos_shape", - "ang2dir", - "latlon2pos", - "pos2latlon", - "chordal_to_great_circle", -] - - -# Geometric functions ######################################################### - - -def set_angles(dim, angles): - """Set the angles for the given dimension. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the angles of the SRF - - Returns - ------- - angles : :class:`float` - the angles fitting to the dimension - - Notes - ----- - If too few angles are given, they are filled up with `0`. - """ - out_angles = np.asarray(angles, dtype=np.double) - out_angles = np.atleast_1d(out_angles)[: no_of_angles(dim)] - # fill up the rotation angle array with zeros - out_angles = np.pad( - out_angles, - (0, no_of_angles(dim) - len(out_angles)), - "constant", - constant_values=0.0, - ) - return out_angles - - -def set_anis(dim, anis): - """Set the anisotropy ratios for the given dimension. 
- - Parameters - ---------- - dim : :class:`int` - spatial dimension - anis : :class:`list` of :class:`float` - the anisotropy of length scales along the transversal directions - - Returns - ------- - anis : :class:`list` of :class:`float` - the anisotropy of length scales fitting the dimensions - - Notes - ----- - If too few anisotropy ratios are given, they are filled up with `1`. - """ - out_anis = np.asarray(anis, dtype=np.double) - out_anis = np.atleast_1d(out_anis)[: dim - 1] - if len(out_anis) < dim - 1: - # fill up the anisotropies with ones, such that len()==dim-1 - out_anis = np.pad( - out_anis, - (dim - len(out_anis) - 1, 0), - "constant", - constant_values=1.0, - ) - return out_anis - - -def no_of_angles(dim): - """Calculate number of rotation angles depending on the dimension. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - - Returns - ------- - :class:`int` - Number of angles. - """ - return (dim * (dim - 1)) // 2 - - -def rotation_planes(dim): - """Get all 2D sub-planes for rotation. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - - Returns - ------- - :class:`list` of :class:`tuple` of :class:`int` - All 2D sub-planes for rotation. - """ - return [(i, j) for j in range(1, dim) for i in range(j)] - - -def givens_rotation(dim, plane, angle): - """Givens rotation matrix in arbitrary dimensions. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - plane : :class:`list` of :class:`int` - the plane to rotate in, given by the indices of the two defining axes. - For example the xy plane is defined by `(0,1)` - angle : :class:`float` or :class:`list` - the rotation angle in the given plane - - Returns - ------- - :class:`numpy.ndarray` - Rotation matrix. - """ - result = np.eye(dim, dtype=np.double) - result[plane[0], plane[0]] = np.cos(angle) - result[plane[1], plane[1]] = np.cos(angle) - result[plane[0], plane[1]] = -np.sin(angle) - result[plane[1], plane[0]] = np.sin(angle) - return result - - -def matrix_rotate(dim, angles): - """Create a matrix to rotate points to the target coordinate-system. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the rotation angles of the target coordinate-system - - Returns - ------- - :class:`numpy.ndarray` - Rotation matrix. - """ - angles = set_angles(dim, angles) - planes = rotation_planes(dim) - result = np.eye(dim, dtype=np.double) - for i, (angle, plane) in enumerate(zip(angles, planes)): - # angles have alternating signs to match tait-bryan - result = np.matmul( - givens_rotation(dim, plane, (-1) ** i * angle), result - ) - return result - - -def matrix_derotate(dim, angles): - """Create a matrix to derotate points to the initial coordinate-system. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the rotation angles of the target coordinate-system - - Returns - ------- - :class:`numpy.ndarray` - Rotation matrix. - """ - # derotating by taking negative angles - angles = -set_angles(dim, angles) - planes = rotation_planes(dim) - result = np.eye(dim, dtype=np.double) - for i, (angle, plane) in enumerate(zip(angles, planes)): - # angles have alternating signs to match tait bryan - result = np.matmul( - result, givens_rotation(dim, plane, (-1) ** i * angle) - ) - return result - - -def matrix_isotropify(dim, anis): - """Create a stretching matrix to make things isotrope. 
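# NOTE (editor): illustrative check, not part of the patch. matrix_rotate
# composes Givens rotations; in 2D a rotation by pi/2 maps the x-axis onto
# the y-axis, and matrix_derotate gives the inverse (transposed) matrix.
import numpy as np
rot = matrix_rotate(2, np.pi / 2)
print(np.allclose(rot @ [1.0, 0.0], [0.0, 1.0]))          # True
print(np.allclose(matrix_derotate(2, np.pi / 2), rot.T))  # True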
- - Parameters - ---------- - dim : :class:`int` - spatial dimension - anis : :class:`list` of :class:`float` - the anisotropy of length scales along the transversal directions - - Returns - ------- - :class:`numpy.ndarray` - Stretching matrix. - """ - anis = set_anis(dim, anis) - return np.diag(np.concatenate(([1.0], 1.0 / anis))) - - -def matrix_anisotropify(dim, anis): - """Create a stretching matrix to make things anisotrope. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - anis : :class:`list` of :class:`float` - the anisotropy of length scales along the transversal directions - - Returns - ------- - :class:`numpy.ndarray` - Stretching matrix. - """ - anis = set_anis(dim, anis) - return np.diag(np.concatenate(([1.0], anis))) - - -def matrix_isometrize(dim, angles, anis): - """Create a matrix to derotate points and make them isotrope. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the rotation angles of the target coordinate-system - anis : :class:`list` of :class:`float` - the anisotropy of length scales along the transversal directions - - Returns - ------- - :class:`numpy.ndarray` - Transformation matrix. - """ - return np.matmul( - matrix_isotropify(dim, anis), matrix_derotate(dim, angles) - ) - - -def matrix_anisometrize(dim, angles, anis): - """Create a matrix to rotate points and make them anisotrope. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the rotation angles of the target coordinate-system - anis : :class:`list` of :class:`float` - the anisotropy of length scales along the transversal directions - - Returns - ------- - :class:`numpy.ndarray` - Transformation matrix. - """ - return np.matmul( - matrix_rotate(dim, angles), matrix_anisotropify(dim, anis) - ) - - -def rotated_main_axes(dim, angles): - """Create list of the main axis defined by the given system rotations. - - Parameters - ---------- - dim : :class:`int` - spatial dimension - angles : :class:`float` or :class:`list` - the rotation angles of the target coordinate-system - - Returns - ------- - :class:`numpy.ndarray` - Main axes of the target coordinate-system. - """ - return matrix_rotate(dim, angles).T - - -# grid routines ############################################################### - - -def generate_grid(pos): - """ - Generate grid from a structured position tuple. - - Parameters - ---------- - pos : :class:`tuple` of :class:`numpy.ndarray` - The structured position tuple. - - Returns - ------- - :class:`numpy.ndarray` - Unstructured position tuple. - """ - return np.asarray( - np.meshgrid(*pos, indexing="ij"), dtype=np.double - ).reshape((len(pos), -1)) - - -def generate_st_grid(pos, time, mesh_type="unstructured"): - """ - Generate spatio-temporal grid from a position tuple and time array. - - Parameters - ---------- - pos : :class:`tuple` of :class:`numpy.ndarray` - The (un-)structured position tuple. - time : :any:`iterable` - The time array. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: `"unstructured"` - - Returns - ------- - :class:`numpy.ndarray` - Unstructured spatio-temporal point tuple. - - Notes - ----- - Time dimension will be the last one. 
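# NOTE (editor): editorial sketch of generate_grid (not part of the diff):
# a structured axes tuple becomes a (dim, n_points) array of grid points.
import numpy as np
axes = (np.array([0.0, 1.0]), np.array([10.0, 20.0, 30.0]))
points = generate_grid(axes)
print(points.shape)    # (2, 6)
print(points[:, 0])    # [ 0. 10.] -> first grid point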
- """ - time = np.asarray(time, dtype=np.double).reshape(-1) - if mesh_type != "unstructured": - pos = generate_grid(pos) - else: - pos = np.atleast_2d(np.asarray(pos, dtype=np.double)) - out = [np.repeat(p.reshape(-1), np.size(time)) for p in pos] - out.append(np.tile(time, np.size(pos[0]))) - return np.asarray(out, dtype=np.double) - - -# conversion ################################################################## - - -def format_struct_pos_dim(pos, dim): - """ - Format a structured position tuple with given dimension. - - Parameters - ---------- - pos : :any:`iterable` - Position tuple, containing main direction and transversal directions. - dim : :class:`int` - Spatial dimension. - - Raises - ------ - ValueError - When position tuple doesn't match the given dimension. - - Returns - ------- - pos : :class:`tuple` of :class:`numpy.ndarray` - The formatted structured position tuple. - shape : :class:`tuple` - Shape of the resulting field. - """ - if dim == 1: - pos = (np.asarray(pos, dtype=np.double).reshape(-1),) - elif len(pos) != dim: - raise ValueError("Formatting: position tuple doesn't match dimension.") - else: - pos = tuple(np.asarray(p, dtype=np.double).reshape(-1) for p in pos) - shape = tuple(len(p) for p in pos) - return pos, shape - - -def format_struct_pos_shape(pos, shape, check_stacked_shape=False): - """ - Format a structured position tuple with given shape. - - Shape could be stacked, when multiple fields are given. - - Parameters - ---------- - pos : :any:`iterable` - Position tuple, containing main direction and transversal directions. - shape : :class:`tuple` - Shape of the input field. - check_stacked_shape : :class:`bool`, optional - Whether to check if given shape comes from stacked fields. - Default: False. - - Raises - ------ - ValueError - When position tuple doesn't match the given dimension. - - Returns - ------- - pos : :class:`tuple` of :class:`numpy.ndarray` - The formatted structured position tuple. - shape : :class:`tuple` - Shape of the resulting field. - dim : :class:`int` - Spatial dimension. 
- """ - # some help from the given shape - shape_size = np.prod(shape) - stacked_shape_size = np.prod(shape[1:]) - wrong_shape = False - # now we try to be smart - try: - # if this works we have either: - # - a 1D array - # - nD array where all axes have same length (corner case) - check_pos = np.array(pos, dtype=np.double, ndmin=2) - except ValueError: - # if it doesn't work, we have a tuple of differently sized axes (easy) - dim = len(pos) - pos, pos_shape = format_struct_pos_dim(pos, dim) - # determine if we have a stacked field if wanted - if check_stacked_shape and stacked_shape_size == np.prod(pos_shape): - shape = (shape[0],) + pos_shape - # check if we have a single field with matching size - elif shape_size == np.prod(pos_shape): - shape = (1,) + pos_shape if check_stacked_shape else pos_shape - # if nothing works, we raise an error - else: - wrong_shape = True - else: - struct_size = np.prod([p.size for p in check_pos]) - # case: 1D unstacked - if check_pos.size == shape_size: - dim = 1 - pos, pos_shape = format_struct_pos_dim(check_pos, dim) - shape = (1,) + pos_shape if check_stacked_shape else pos_shape - # case: 1D and stacked - elif check_pos.size == stacked_shape_size: - dim = 1 - pos, pos_shape = format_struct_pos_dim(check_pos, dim) - cnt = shape[0] - shape = (cnt,) + pos_shape - wrong_shape = not check_stacked_shape - # case: nD unstacked - elif struct_size == shape_size: - dim = len(check_pos) - pos, pos_shape = format_struct_pos_dim(pos, dim) - shape = (1,) + pos_shape if check_stacked_shape else pos_shape - # case: nD and stacked - elif struct_size == stacked_shape_size: - dim = len(check_pos) - pos, pos_shape = format_struct_pos_dim(pos, dim) - cnt = shape[0] - shape = (cnt,) + pos_shape - wrong_shape = not check_stacked_shape - # if nothing works, we raise an error - else: - wrong_shape = True - - # if shape was wrong at one point we raise an error - if wrong_shape: - raise ValueError("Formatting: position tuple doesn't match dimension.") - - return pos, shape, dim - - -def format_unstruct_pos_shape(pos, shape, check_stacked_shape=False): - """ - Format an unstructured position tuple with given shape. - - Shape could be stacked, when multiple fields were given. - - Parameters - ---------- - pos : :any:`iterable` - Position tuple, containing point coordinates. - shape : :class:`tuple` - Shape of the input field. - check_stacked_shape : :class:`bool`, optional - Whether to check if given shape comes from stacked fields. - Default: False. - - Raises - ------ - ValueError - When position tuple doesn't match the given dimension. - - Returns - ------- - pos : :class:`tuple` of :class:`numpy.ndarray` - The formatted structured position tuple. - shape : :class:`tuple` - Shape of the resulting field. - dim : :class:`int` - Spatial dimension. 
- """ - # some help from the given shape - shape_size = np.prod(shape) - stacked_shape_size = np.prod(shape[1:]) - wrong_shape = False - # now we try to be smart - pre_len = len(np.atleast_1d(pos)) - # care about 1D: pos can be given as 1D array here -> convert to 2D array - pos = np.atleast_2d(np.asarray(pos, dtype=np.double)) - post_len = len(pos) - # first array dimension should be spatial dimension (1D is special case) - dim = post_len if pre_len == post_len else 1 - pnt_cnt = pos[0].size - # case: 1D unstacked - if dim == 1 and pos.size == shape_size: - shape = (1, pos.size) if check_stacked_shape else (pos.size,) - # case: 1D and stacked - elif dim == 1 and pos.size == stacked_shape_size: - shape = (shape[0], pos.size) - wrong_shape = not check_stacked_shape - # case: nD unstacked - elif pnt_cnt == shape_size: - shape = (1, pnt_cnt) if check_stacked_shape else pnt_cnt - # case: nD and stacked - elif pnt_cnt == stacked_shape_size: - shape = (shape[0], pnt_cnt) - wrong_shape = not check_stacked_shape - # if nothing works, we raise an error - else: - wrong_shape = True - - # if shape was wrong at one point we raise an error - if wrong_shape: - raise ValueError("Formatting: position tuple doesn't match dimension.") - - pos = pos.reshape((dim, -1)) - - return pos, shape, dim - - -def ang2dir(angles, dtype=np.double, dim=None): - """Convert n-D spherical coordinates to Euclidean direction vectors. - - Parameters - ---------- - angles : :class:`list` of :class:`numpy.ndarray` - spherical coordinates given as angles. - dtype : data-type, optional - The desired data-type for the array. - If not given, then the type will be determined as the minimum type - required to hold the objects in the sequence. Default: None - dim : :class:`int`, optional - Cut of information above the given dimension. - Otherwise, dimension is determined by number of angles - Default: None - - Returns - ------- - :class:`numpy.ndarray` - the array of direction vectors - """ - pre_dim = np.asanyarray(angles).ndim - angles = np.atleast_2d(np.asarray(angles, dtype=dtype)) - if len(angles.shape) > 2: - raise ValueError(f"Can't interpret angles array {angles}") - dim = angles.shape[1] + 1 if dim is None else dim - if dim == 2 and angles.shape[0] == 1 and pre_dim < 2: - # fix for 2D where only one angle per direction is given - angles = angles.T # can't be interpreted if dim=None is given - if dim != angles.shape[1] + 1 or dim == 1: - raise ValueError(f"Wrong dim. ({dim}) for angles {angles}") - vec = np.empty((angles.shape[0], dim), dtype=dtype) - vec[:, 0] = np.prod(np.sin(angles), axis=1) - for i in range(1, dim): - vec[:, i] = np.prod(np.sin(angles[:, i:]), axis=1) # empty prod = 1 - vec[:, i] *= np.cos(angles[:, (i - 1)]) - if dim in [2, 3]: - vec[:, [0, 1]] = vec[:, [1, 0]] # to match convention in 2D and 3D - return vec - - -def latlon2pos( - latlon, radius=1.0, dtype=np.double, temporal=False, time_scale=1.0 -): - """Convert lat-lon geo coordinates to 3D position tuple. - - Parameters - ---------- - latlon : :class:`list` of :class:`numpy.ndarray` - latitude and longitude given in degrees. - May includes an appended time axis if `time=True`. - radius : :class:`float`, optional - Sphere radius. Default: `1.0` - dtype : data-type, optional - The desired data-type for the array. - If not given, then the type will be determined as the minimum type - required to hold the objects in the sequence. Default: None - temporal : :class:`bool`, optional - Whether latlon includes an appended time axis. 
- Default: False - time_scale : :class:`float`, optional - Scaling factor (e.g. anisotropy) for the time axis. - Default: `1.0` - - Returns - ------- - :class:`numpy.ndarray` - the 3D position array - """ - latlon = np.asarray(latlon, dtype=dtype).reshape( - (3 if temporal else 2, -1) - ) - lat, lon = np.deg2rad(latlon[:2]) - pos_tuple = ( - radius * np.cos(lat) * np.cos(lon), - radius * np.cos(lat) * np.sin(lon), - radius * np.sin(lat) * np.ones_like(lon), - ) - if temporal: - return np.array(pos_tuple + (latlon[2] / time_scale,), dtype=dtype) - return np.array(pos_tuple, dtype=dtype) - - -def pos2latlon( - pos, radius=1.0, dtype=np.double, temporal=False, time_scale=1.0 -): - """Convert 3D position tuple from sphere to lat-lon geo coordinates. - - Parameters - ---------- - pos : :class:`list` of :class:`numpy.ndarray` - The position tuple containing points on a unit-sphere. - May includes an appended time axis if `time=True`. - radius : :class:`float`, optional - Sphere radius. Default: `1.0` - dtype : data-type, optional - The desired data-type for the array. - If not given, then the type will be determined as the minimum type - required to hold the objects in the sequence. Default: None - temporal : :class:`bool`, optional - Whether latlon includes an appended time axis. - Default: False - time_scale : :class:`float`, optional - Scaling factor (e.g. anisotropy) for the time axis. - Default: `1.0` - - Returns - ------- - :class:`numpy.ndarray` - the 3D position array - """ - pos = np.asarray(pos, dtype=dtype).reshape((4 if temporal else 3, -1)) - # prevent numerical errors in arcsin - lat = np.arcsin(np.maximum(np.minimum(pos[2] / radius, 1.0), -1.0)) - lon = np.arctan2(pos[1], pos[0]) - latlon = np.rad2deg((lat, lon), dtype=dtype) - if temporal: - return np.array( - (latlon[0], latlon[1], pos[3] * time_scale), dtype=dtype - ) - return latlon - - -def chordal_to_great_circle(dist, radius=1.0): - """ - Calculate great circle distance corresponding to given chordal distance. - - Parameters - ---------- - dist : array_like - Chordal distance of two points on the sphere. - radius : :class:`float`, optional - Sphere radius. Default: `1.0` - - Returns - ------- - :class:`numpy.ndarray` - Great circle distance corresponding to given chordal distance. - - Notes - ----- - If given values are not in [0, 2 * radius], they will be truncated. - """ - diameter = 2 * radius - return diameter * np.arcsin( - np.maximum(np.minimum(np.divide(dist, diameter), 1), 0) - ) - - -def great_circle_to_chordal(dist, radius=1.0): - """ - Calculate chordal distance corresponding to given great circle distance. - - Parameters - ---------- - dist : array_like - Great circle distance of two points on the sphere. - radius : :class:`float`, optional - Sphere radius. Default: `1.0` - - Returns - ------- - :class:`numpy.ndarray` - Chordal distance corresponding to given great circle distance. - """ - diameter = 2 * radius - return diameter * np.sin(np.divide(dist, diameter)) diff --git a/src/gstools_cython/tools/misc.py b/src/gstools_cython/tools/misc.py deleted file mode 100755 index aaba1501..00000000 --- a/src/gstools_cython/tools/misc.py +++ /dev/null @@ -1,143 +0,0 @@ -""" -GStools subpackage providing miscellaneous tools. - -.. currentmodule:: gstools.tools.misc - -The following functions are provided - -.. 
autosummary:: - get_fig_ax - list_format - eval_func -""" - -# pylint: disable=C0103, C0415 -import numpy as np - -from gstools.tools.geometric import format_struct_pos_dim, generate_grid - -__all__ = ["get_fig_ax", "list_format", "eval_func"] - - -def get_fig_ax(fig=None, ax=None, ax_name="rectilinear"): # pragma: no cover - """ - Get correct matplotlib figure and axes. - - Parameters - ---------- - fig : figure or :any:`None` - desired figure. - ax : axis or :any:`None` - desired axis. - ax_name : :class:`str`, optional - Axis type name. The default is "rectilinear". - - Returns - ------- - fig : figure - desired figure. - ax : axis - desired axis. - """ - try: - from matplotlib import pyplot as plt - except ImportError as exc: - raise ImportError("Plotting: Matplotlib not installed.") from exc - - if fig is None and ax is None: - fig = plt.figure() - ax = fig.add_subplot(111, projection=ax_name) - elif ax is None: - ax = fig.add_subplot(111, projection=ax_name) - elif fig is None: - fig = ax.get_figure() - assert ax.name == ax_name - else: - assert ax.name == ax_name - assert ax.get_figure() == fig - return fig, ax - - -def list_format(lst, prec): # pragma: no cover - """Format a list of floats.""" - return f"[{', '.join(f'{float(x):.{prec}}' for x in lst)}]" - - -def eval_func( - func_val, - pos, - dim, - mesh_type="unstructured", - value_type="scalar", - broadcast=False, -): - """ - Evaluate a function on a mesh. - - Parameters - ---------- - func_val : :any:`callable` or :class:`float` or :any:`None` - Function to be called or single value to be filled. - Should have the signature f(x, [y, z, ...]) in case of callable. - In case of a float, the field will be filled with a single value and - in case of None, this value will be set to 0. - pos : :class:`list` - The position tuple, containing main direction and transversal - directions (x, [y, z, ...]). - dim : :class:`int` - The spatial dimension. - mesh_type : :class:`str`, optional - 'structured' / 'unstructured' - Default: 'unstructured' - value_type : :class:`str`, optional - Value type of the field. Either "scalar" or "vector". - The default is "scalar". - broadcast : :class:`bool`, optional - Whether to return a single value, if a single value was given. - Default: False - - Returns - ------- - :class:`numpy.ndarray` - Function values at the given points. 
- """ - # care about scalar inputs - func_val = 0 if func_val is None else func_val - if broadcast and not callable(func_val) and np.size(func_val) == 1: - return np.asarray(func_val, dtype=np.double).item() - if not callable(func_val): - func_val = _func_from_single_val(func_val, dim, value_type=value_type) - # care about mesh and function call - if mesh_type != "unstructured": - pos, shape = format_struct_pos_dim(pos, dim) - pos = generate_grid(pos) - else: - pos = np.asarray(pos, dtype=np.double).reshape(dim, -1) - shape = np.shape(pos[0]) - # prepend dimension if we have a vector field - if value_type == "vector": - shape = (dim,) + shape - return np.reshape(func_val(*pos), shape) - - -def _func_from_single_val(value, dim=None, value_type="scalar"): - # care about broadcasting vector values for each dim - v_d = dim if value_type == "vector" else 1 # value dim - if v_d is None: # pragma: no cover - raise ValueError("_func_from_single_val: dim needed for vector value.") - value = np.asarray(value, dtype=np.double).ravel()[:v_d] - # fill up vector valued output to dimension with last value - value = np.pad( - value, (0, v_d - len(value)), "constant", constant_values=value[-1] - ) - - def _f(*pos): - # zip uses shortest len of iterables given (correct for scalar value) - return np.concatenate( - [ - np.full_like(p, val, dtype=np.double) - for p, val in zip(pos, value) - ] - ) - - return _f diff --git a/src/gstools_cython/tools/special.py b/src/gstools_cython/tools/special.py deleted file mode 100644 index 1457b736..00000000 --- a/src/gstools_cython/tools/special.py +++ /dev/null @@ -1,257 +0,0 @@ -""" -GStools subpackage providing special functions. - -.. currentmodule:: gstools.tools.special - -The following functions are provided - -.. autosummary:: - inc_gamma - inc_gamma_low - exp_int - inc_beta - tplstable_cor - tpl_exp_spec_dens - tpl_gau_spec_dens -""" - -# pylint: disable=C0103, E1101 -import numpy as np -from scipy import special as sps - -__all__ = [ - "confidence_scaling", - "inc_gamma", - "inc_gamma_low", - "exp_int", - "inc_beta", - "tplstable_cor", - "tpl_exp_spec_dens", - "tpl_gau_spec_dens", -] - - -# special functions ########################################################### - - -def confidence_scaling(per=0.95): - """ - Scaling of standard deviation to get the desired confidence interval. - - Parameters - ---------- - per : :class:`float`, optional - Confidence level. The default is 0.95. - - Returns - ------- - :class:`float` - Scale to multiply the standard deviation with. - """ - return np.sqrt(2) * sps.erfinv(per) - - -def inc_gamma(s, x): - r"""Calculate the (upper) incomplete gamma function. - - Given by: :math:`\Gamma(s,x) = \int_x^{\infty} t^{s-1}\,e^{-t}\,{\rm d}t` - - Parameters - ---------- - s : :class:`float` - exponent in the integral - x : :class:`numpy.ndarray` - input values - """ - if np.isclose(s, 0): - return sps.exp1(x) - if np.isclose(s, np.around(s)) and s < -0.5: - return x**s * sps.expn(int(1 - np.around(s)), x) - if s < 0: - return (inc_gamma(s + 1, x) - x**s * np.exp(-x)) / s - return sps.gamma(s) * sps.gammaincc(s, x) - - -def inc_gamma_low(s, x): - r"""Calculate the lower incomplete gamma function. 
- - Given by: :math:`\gamma(s,x) = \int_0^x t^{s-1}\,e^{-t}\,{\rm d}t` - - Parameters - ---------- - s : :class:`float` - exponent in the integral - x : :class:`numpy.ndarray` - input values - """ - if np.isclose(s, np.around(s)) and s < 0.5: - return np.full_like(x, np.inf, dtype=np.double) - if s < 0: - return (inc_gamma_low(s + 1, x) + x**s * np.exp(-x)) / s - return sps.gamma(s) * sps.gammainc(s, x) - - -def exp_int(s, x): - r"""Calculate the exponential integral :math:`E_s(x)`. - - Given by: :math:`E_s(x) = \int_1^\infty \frac{e^{-xt}}{t^s}\,\mathrm dt` - - Parameters - ---------- - s : :class:`float` - exponent in the integral (should be > -100) - x : :class:`numpy.ndarray` - input values - """ - if np.isclose(s, 1): - return sps.exp1(x) - if np.isclose(s, np.around(s)) and s > -0.5: - return sps.expn(int(np.around(s)), x) - x = np.asarray(x, dtype=np.double) - x_neg = x < 0 - x = np.abs(x) - x_compare = x ** min((10, max(((1 - s), 1)))) - res = np.empty_like(x) - # use asymptotic behavior for zeros - x_zero = np.isclose(x_compare, 0, atol=1e-20) - x_inf = x > max(30, -s / 2) # function is like exp(-x)*(1/x + s/x^2) - x_fin = np.logical_not(np.logical_or(x_zero, x_inf)) - x_fin_pos = np.logical_and(x_fin, np.logical_not(x_neg)) - if s > 1.0: # limit at x=+0 - res[x_zero] = 1.0 / (s - 1.0) - else: - res[x_zero] = np.inf - res[x_inf] = np.exp(-x[x_inf]) * (x[x_inf] ** -1 - s * x[x_inf] ** -2) - res[x_fin_pos] = inc_gamma(1 - s, x[x_fin_pos]) * x[x_fin_pos] ** (s - 1) - res[x_neg] = np.nan # nan for x < 0 - return res - - -def inc_beta(a, b, x): - r"""Calculate the incomplete Beta function. - - Given by: :math:`B(a,b;\,x) = \int_0^x t^{a-1}\,(1-t)^{b-1}\,dt` - - Parameters - ---------- - a : :class:`float` - first exponent in the integral - b : :class:`float` - second exponent in the integral - x : :class:`numpy.ndarray` - input values - """ - return sps.betainc(a, b, x) * sps.beta(a, b) - - -def tplstable_cor(r, len_scale, hurst, alpha): - r"""Calculate the correlation function of the TPLStable model. - - Given by the following correlation function: - - .. math:: - \rho(r) = - \frac{2H}{\alpha} \cdot - E_{1+\frac{2H}{\alpha}} - \left(\left(\frac{r}{\ell}\right)^{\alpha} \right) - - - Parameters - ---------- - r : :class:`numpy.ndarray` - input values - len_scale : :class:`float` - length-scale of the model. - hurst : :class:`float` - Hurst coefficient of the power law. - alpha : :class:`float`, optional - Shape parameter of the stable model. - """ - r = np.asarray(np.abs(r / len_scale), dtype=np.double) - r[np.isclose(r, 0)] = 0 # hack to prevent numerical errors - res = np.ones_like(r) - res[r > 0] = (2 * hurst / alpha) * exp_int( - 1 + 2 * hurst / alpha, (r[r > 0]) ** alpha - ) - return res - - -def tpl_exp_spec_dens(k, dim, len_scale, hurst, len_low=0.0): - r""" - Spectral density of the TPLExponential covariance model. - - Parameters - ---------- - k : :class:`float` - Radius of the phase: :math:`k=\left\Vert\mathbf{k}\right\Vert` - dim : :class:`int` - Dimension of the model. - len_scale : :class:`float` - Length scale of the model. - hurst : :class:`float` - Hurst coefficient of the power law. - len_low : :class:`float`, optional - The lower length scale truncation of the model. 
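# NOTE (editor): editorial sketch, not part of the diff, of two properties of
# the helpers above: exp_int falls back to scipy's E1 for s=1, and the
# TPLStable correlation equals 1 at lag r=0.
import numpy as np
from scipy import special as sps
print(np.isclose(exp_int(1.0, 2.0), sps.exp1(2.0)))             # True
print(tplstable_cor(np.array([0.0, 1.0]), 1.0, 0.5, 1.5)[0])    # 1.0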
- Default: 0.0 - - Returns - ------- - :class:`float` - spectral density of the TPLExponential model - """ - if np.isclose(len_low, 0.0): - k = np.asarray(k, dtype=np.double) - z = (k * len_scale) ** 2 - a = hurst + dim / 2.0 - b = hurst + 0.5 - c = hurst + dim / 2.0 + 1.0 - d = dim / 2.0 + 0.5 - fac = len_scale**dim * hurst * sps.gamma(d) / (np.pi**d * a) - return fac / (1.0 + z) ** a * sps.hyp2f1(a, b, c, z / (1.0 + z)) - fac_up = (len_scale + len_low) ** (2 * hurst) - spec_up = tpl_exp_spec_dens(k, dim, len_scale + len_low, hurst) - fac_low = len_low ** (2 * hurst) - spec_low = tpl_exp_spec_dens(k, dim, len_low, hurst) - return (fac_up * spec_up - fac_low * spec_low) / (fac_up - fac_low) - - -def tpl_gau_spec_dens(k, dim, len_scale, hurst, len_low=0.0): - r""" - Spectral density of the TPLGaussian covariance model. - - Parameters - ---------- - k : :class:`float` - Radius of the phase: :math:`k=\left\Vert\mathbf{k}\right\Vert` - dim : :class:`int` - Dimension of the model. - len_scale : :class:`float` - Length scale of the model. - hurst : :class:`float` - Hurst coefficient of the power law. - len_low : :class:`float`, optional - The lower length scale truncation of the model. - Default: 0.0 - - Returns - ------- - :class:`float` - spectral density of the TPLExponential model - """ - if np.isclose(len_low, 0.0): - k = np.asarray(k, dtype=np.double) - z = np.array((k * len_scale / 2.0) ** 2) - res = np.empty_like(z) - z_gz = z > 0.1 # greater zero - z_nz = np.logical_not(z_gz) # near zero - a = hurst + dim / 2.0 - fac = (len_scale / 2.0) ** dim * hurst / np.pi ** (dim / 2.0) - res[z_gz] = fac * inc_gamma_low(a, z[z_gz]) / z[z_gz] ** a - # first order approximation for z near zero - res[z_nz] = fac * (1.0 / a - z[z_nz] / (a + 1.0)) - return res - fac_up = (len_scale + len_low) ** (2 * hurst) - spec_up = tpl_gau_spec_dens(k, dim, len_scale + len_low, hurst) - fac_low = len_low ** (2 * hurst) - spec_low = tpl_gau_spec_dens(k, dim, len_low, hurst) - return (fac_up * spec_up - fac_low * spec_low) / (fac_up - fac_low) diff --git a/src/gstools_cython/transform/__init__.py b/src/gstools_cython/transform/__init__.py deleted file mode 100644 index b2e6ce4f..00000000 --- a/src/gstools_cython/transform/__init__.py +++ /dev/null @@ -1,93 +0,0 @@ -""" -GStools subpackage providing transformations to post-process normal fields. - -.. currentmodule:: gstools.transform - -Wrapper -^^^^^^^ - -.. autosummary:: - :toctree: - - apply - -Field Transformations -^^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - binary - discrete - boxcox - zinnharvey - normal_force_moments - normal_to_lognormal - normal_to_uniform - normal_to_arcsin - normal_to_uquad - apply_function - -Array Transformations -^^^^^^^^^^^^^^^^^^^^^ - -.. 
autosummary:: - :toctree: - - array_discrete - array_boxcox - array_zinnharvey - array_force_moments - array_to_lognormal - array_to_uniform - array_to_arcsin - array_to_uquad - ----- -""" - -from gstools.transform.array import ( - array_boxcox, - array_discrete, - array_force_moments, - array_to_arcsin, - array_to_lognormal, - array_to_uniform, - array_to_uquad, - array_zinnharvey, -) -from gstools.transform.field import ( - apply, - apply_function, - binary, - boxcox, - discrete, - normal_force_moments, - normal_to_arcsin, - normal_to_lognormal, - normal_to_uniform, - normal_to_uquad, - zinnharvey, -) - -__all__ = [ - "apply", - "apply_function", - "binary", - "discrete", - "boxcox", - "zinnharvey", - "normal_force_moments", - "normal_to_lognormal", - "normal_to_uniform", - "normal_to_arcsin", - "normal_to_uquad", - "array_discrete", - "array_boxcox", - "array_zinnharvey", - "array_force_moments", - "array_to_lognormal", - "array_to_uniform", - "array_to_arcsin", - "array_to_uquad", -] diff --git a/src/gstools_cython/transform/array.py b/src/gstools_cython/transform/array.py deleted file mode 100644 index 87564edf..00000000 --- a/src/gstools_cython/transform/array.py +++ /dev/null @@ -1,360 +0,0 @@ -""" -GStools subpackage providing array transformations. - -.. currentmodule:: gstools.transform.array - -The following functions are provided - -Transformations -^^^^^^^^^^^^^^^ - -.. autosummary:: - array_discrete - array_boxcox - array_zinnharvey - array_force_moments - array_to_lognormal - array_to_uniform - array_to_arcsin - array_to_uquad -""" - -# pylint: disable=C0103, C0123, R0911 -from warnings import warn - -import numpy as np -from scipy.special import erf, erfinv - -__all__ = [ - "array_discrete", - "array_boxcox", - "array_zinnharvey", - "array_force_moments", - "array_to_lognormal", - "array_to_uniform", - "array_to_arcsin", - "array_to_uquad", -] - - -def array_discrete( - field, values, thresholds="arithmetic", mean=None, var=None -): - """ - Discrete transformation. - - After this transformation, the field has only `len(values)` discrete - values. - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - values : :any:`numpy.ndarray` - The discrete values the field will take - thresholds : :class:`str` or :any:`numpy.ndarray`, optional - the thresholds, where the value classes are separated - possible values are: - * "arithmetic": the mean of the 2 neighbouring values - * "equal": divide the field into equal parts - * an array of explicitly given thresholds - Default: "arithmetic" - mean : :class:`float`or :any:`None` - Mean of the field for "equal" thresholds. Default: np.mean(field) - var : :class:`float`or :any:`None` - Variance of the field for "equal" thresholds. Default: np.var(field) - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. 
- """ - field = np.asarray(field) - if thresholds == "arithmetic": - # just in case, sort the values - values = np.sort(values) - thresholds = (values[1:] + values[:-1]) / 2 - elif thresholds == "equal": - mean = np.mean(field) if mean is None else float(mean) - var = np.var(field) if var is None else float(var) - values = np.asarray(values) - n = len(values) - p = np.arange(1, n) / n # n-1 equal subdivisions of [0, 1] - rescale = np.sqrt(var * 2) - # use quantile of the normal distribution to get equal ratios - thresholds = mean + rescale * erfinv(2 * p - 1) - else: - if len(values) != len(thresholds) + 1: - raise ValueError( - "discrete transformation: len(values) != len(thresholds) + 1" - ) - values = np.asarray(values) - thresholds = np.asarray(thresholds) - # check thresholds - if not np.all(thresholds[:-1] < thresholds[1:]): - raise ValueError( - "discrete transformation: thresholds need to be ascending" - ) - # use a separate result so the intermediate results are not affected - result = np.empty_like(field) - # handle edge cases - result[field <= thresholds[0]] = values[0] - result[field > thresholds[-1]] = values[-1] - for i, value in enumerate(values[1:-1]): - result[ - np.logical_and(thresholds[i] < field, field <= thresholds[i + 1]) - ] = value - return result - - -def array_boxcox(field, lmbda=1, shift=0): - """ - (Inverse) Box-Cox transformation to denormalize data. - - After this transformation, the again Box-Cox transformed field is normal - distributed. - - See: https://en.wikipedia.org/wiki/Power_transform#Box%E2%80%93Cox_transformation - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - lmbda : :class:`float`, optional - The lambda parameter of the Box-Cox transformation. - For ``lmbda=0`` one obtains the log-normal transformation. - Default: ``1`` - shift : :class:`float`, optional - The shift parameter from the two-parametric Box-Cox transformation. - The field will be shifted by that value before transformation. - Default: ``0`` - """ - field = np.asarray(field) - result = field + shift - if np.isclose(lmbda, 0): - return array_to_lognormal(result) - if np.min(result) < -1 / lmbda: - warn("Box-Cox: Some values will be cut off!") - return (np.maximum(lmbda * result + 1, 0)) ** (1 / lmbda) - - -def array_zinnharvey(field, conn="high", mean=None, var=None): - """ - Zinn and Harvey transformation to connect low or high values. - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - conn : :class:`str`, optional - Desired connectivity. Either "low" or "high". - Default: "high" - mean : :class:`float` or :any:`None`, optional - Mean of the given field. If None is given, the mean will be calculated. - Default: :any:`None` - var : :class:`float` or :any:`None`, optional - Variance of the given field. - If None is given, the variance will be calculated. - Default: :any:`None` - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - field = np.asarray(field) - mean = np.mean(field) if mean is None else float(mean) - var = np.var(field) if var is None else float(var) - result = np.abs((field - mean) / np.sqrt(var)) - result = np.sqrt(2) * erfinv(2 * erf(result / np.sqrt(2)) - 1) - if conn == "high": - result = -result - return result * np.sqrt(var) + mean - - -def array_force_moments(field, mean=0, var=1): - """ - Force moments of a normal distributed field. - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. 
- mean : :class:`float`, optional - Desired mean of the field. - Default: 0 - var : :class:`float` or :any:`None`, optional - Desired variance of the field. - Default: 1 - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - field = np.asarray(field) - var_in = np.var(field) - mean_in = np.mean(field) - rescale = np.sqrt(var / var_in) - return rescale * (field - mean_in) + mean - - -def array_to_lognormal(field): - """ - Transform normal distribution to log-normal distribution. - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - return np.exp(field) - - -def array_to_uniform(field, mean=None, var=None, low=0.0, high=1.0): - """ - Transform normal distribution to uniform distribution on [low, high]. - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - mean : :class:`float` or :any:`None`, optional - Mean of the given field. If None is given, the mean will be calculated. - Default: :any:`None` - var : :class:`float` or :any:`None`, optional - Variance of the given field. - If None is given, the variance will be calculated. - Default: :any:`None` - low : :class:`float`, optional - Lower bound for the uniform distribution. - Default: 0.0 - high : :class:`float`, optional - Upper bound for the uniform distribution. - Default: 1.0 - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - field = np.asarray(field) - mean = np.mean(field) if mean is None else float(mean) - var = np.var(field) if var is None else float(var) - return ( - 0.5 * (1 + erf((field - mean) / np.sqrt(2 * var))) * (high - low) + low - ) - - -def array_to_arcsin(field, mean=None, var=None, a=None, b=None): - """ - Transform normal distribution to arcsin distribution. - - See: https://en.wikipedia.org/wiki/Arcsine_distribution - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - mean : :class:`float` or :any:`None`, optional - Mean of the given field. If None is given, the mean will be calculated. - Default: :any:`None` - var : :class:`float` or :any:`None`, optional - Variance of the given field. - If None is given, the mean will be calculated. - Default: :any:`None` - a : :class:`float`, optional - Parameter a of the arcsin distribution (lower bound). - Default: keep mean and variance - b : :class:`float`, optional - Parameter b of the arcsin distribution (upper bound). - Default: keep mean and variance - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - field = np.asarray(field) - mean = np.mean(field) if mean is None else float(mean) - var = np.var(field) if var is None else float(var) - a = mean - np.sqrt(2.0 * var) if a is None else float(a) - b = mean + np.sqrt(2.0 * var) if b is None else float(b) - return _uniform_to_arcsin(array_to_uniform(field, mean, var), a, b) - - -def array_to_uquad(field, mean=None, var=None, a=None, b=None): - """ - Transform normal distribution to U-quadratic distribution. - - See: https://en.wikipedia.org/wiki/U-quadratic_distribution - - Parameters - ---------- - field : :class:`numpy.ndarray` - Normal distributed values. - mean : :class:`float` or :any:`None`, optional - Mean of the given field. If None is given, the mean will be calculated. - Default: :any:`None` - var : :class:`float` or :any:`None`, optional - Variance of the given field. - If None is given, the variance will be calculated. 
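# NOTE (editor): illustrative check (editorial): the normal CDF used in
# array_to_uniform maps the field mean to 0.5 on the uniform scale.
import numpy as np
print(array_to_uniform(np.array([0.0]), mean=0.0, var=1.0))    # [0.5]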
- Default: :any:`None` - a : :class:`float`, optional - Parameter a of the U-quadratic distribution (lower bound). - Default: keep mean and variance - b : :class:`float`, optional - Parameter b of the U-quadratic distribution (upper bound). - Default: keep mean and variance - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - field = np.asarray(field) - mean = np.mean(field) if mean is None else float(mean) - var = np.var(field) if var is None else float(var) - a = mean - np.sqrt(5.0 / 3.0 * var) if a is None else float(a) - b = mean + np.sqrt(5.0 / 3.0 * var) if b is None else float(b) - return _uniform_to_uquad(array_to_uniform(field, mean, var), a, b) - - -def _uniform_to_arcsin(field, a=0, b=1): - """ - PPF of your desired distribution. - - The PPF is the inverse of the CDF and is used to sample a distribution - from uniform distributed values on [0, 1] - - in this case: the arcsin distribution - See: https://en.wikipedia.org/wiki/Arcsine_distribution - """ - field = np.asarray(field) - return (b - a) * np.sin(np.pi * 0.5 * field) ** 2 + a - - -def _uniform_to_uquad(field, a=0, b=1): - """ - PPF of your desired distribution. - - The PPF is the inverse of the CDF and is used to sample a distribution - from uniform distributed values on [0, 1] - - in this case: the U-quadratic distribution - See: https://en.wikipedia.org/wiki/U-quadratic_distribution - """ - field = np.asarray(field) - al = 12 / (b - a) ** 3 - be = (a + b) / 2 - ga = (a - b) ** 3 / 8 - y_raw = 3 * field / al + ga - result = np.zeros_like(y_raw) - result[y_raw > 0] = y_raw[y_raw > 0] ** (1 / 3) - result[y_raw < 0] = -((-y_raw[y_raw < 0]) ** (1 / 3)) - return result + be diff --git a/src/gstools_cython/transform/field.py b/src/gstools_cython/transform/field.py deleted file mode 100644 index a123e798..00000000 --- a/src/gstools_cython/transform/field.py +++ /dev/null @@ -1,725 +0,0 @@ -""" -GStools subpackage providing field transformations. - -.. currentmodule:: gstools.transform.field - -The following functions are provided - -Wrapper -^^^^^^^ - -.. autosummary:: - apply - -Transformations -^^^^^^^^^^^^^^^ - -.. 
autosummary:: - apply_function - binary - discrete - boxcox - zinnharvey - normal_force_moments - normal_to_lognormal - normal_to_uniform - normal_to_arcsin - normal_to_uquad -""" - -# pylint: disable=C0103, C0123, R0911, R1735 -import numpy as np - -from gstools.normalizer import ( - Normalizer, - apply_mean_norm_trend, - remove_trend_norm_mean, -) -from gstools.transform.array import ( - array_boxcox, - array_discrete, - array_force_moments, - array_to_arcsin, - array_to_lognormal, - array_to_uniform, - array_to_uquad, - array_zinnharvey, -) - -__all__ = [ - "apply", - "apply_function", - "binary", - "discrete", - "boxcox", - "zinnharvey", - "normal_force_moments", - "normal_to_lognormal", - "normal_to_uniform", - "normal_to_arcsin", - "normal_to_uquad", -] - - -def _pre_process(fld, data, keep_mean): - return remove_trend_norm_mean( - pos=fld.pos, - field=data, - mean=None if keep_mean else fld.mean, - normalizer=fld.normalizer, - trend=fld.trend, - mesh_type=fld.mesh_type, - value_type=fld.value_type, - check_shape=False, - ) - - -def _post_process(fld, data, keep_mean): - return apply_mean_norm_trend( - pos=fld.pos, - field=data, - mean=None if keep_mean else fld.mean, - normalizer=fld.normalizer, - trend=fld.trend, - mesh_type=fld.mesh_type, - value_type=fld.value_type, - check_shape=False, - ) - - -def _check_for_default_normal(fld): - if not type(fld.normalizer) == Normalizer: - raise ValueError( - "transform: need a normal field but there is a normalizer defined" - ) - if fld.trend is not None: - raise ValueError( - "transform: need a normal field but there is a trend defined" - ) - if callable(fld.mean) or fld.mean is None: - raise ValueError( - "transform: need a normal field but mean is not constant" - ) - - -def apply(fld, method, field="field", store=True, process=False, **kwargs): - """ - Apply field transformation. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - method : :class:`str` - Method to use. - See :py:mod:`gstools.transform` for available transformations. - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or with a specified name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - **kwargs - Keyword arguments forwarded to selected method. - - Raises - ------ - ValueError - When method is unknown. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. 
- """ - kwargs["field"] = field - kwargs["store"] = store - kwargs["process"] = process - method = str(method) # ensure method is a string - if method == "binary": - return binary(fld, **kwargs) - if method == "discrete": - return discrete(fld, **kwargs) - if method == "boxcox": - return boxcox(fld, **kwargs) - if method == "zinnharvey": - return zinnharvey(fld, **kwargs) - if method.endswith("force_moments"): - return normal_force_moments(fld, **kwargs) - if method.endswith("lognormal"): - return normal_to_lognormal(fld, **kwargs) - if method.endswith("uniform"): - return normal_to_uniform(fld, **kwargs) - if method.endswith("arcsin"): - return normal_to_arcsin(fld, **kwargs) - if method.endswith("uquad"): - return normal_to_uquad(fld, **kwargs) - if method.endswith("function"): - return apply_function(fld, **kwargs) - raise ValueError(f"transform.apply: unknown method '{method}'") - - -def apply_function( - fld, - function, - field="field", - store=True, - process=False, - keep_mean=True, - **kwargs, -): - """ - Apply function as field transformation. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - function : :any:`callable` - Function to use. - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - **kwargs - Keyword arguments forwarded to given function. - - Raises - ------ - ValueError - When function is not callable. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not callable(function): - raise ValueError("transform.apply_function: function not a 'callable'") - data = fld[field] - name, save = fld.get_store_config(store, default=field) - if process: - data = _pre_process(fld, data, keep_mean=keep_mean) - data = function(data, **kwargs) - if process: - data = _post_process(fld, data, keep_mean=keep_mean) - return fld.post_field(data, name=name, process=False, save=save) - - -def binary( - fld, - divide=None, - upper=None, - lower=None, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Binary transformation. - - After this transformation, the field only has two values. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - divide : :class:`float`, optional - The dividing value. - Default: ``fld.mean`` - upper : :class:`float`, optional - The resulting upper value of the field. - Default: ``mean + sqrt(fld.model.sill)`` - lower : :class:`float`, optional - The resulting lower value of the field. - Default: ``mean - sqrt(fld.model.sill)`` - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. 
- """ - if not process and divide is None: - _check_for_default_normal(fld) - mean = 0.0 if process and not keep_mean else fld.mean - divide = mean if divide is None else divide - upper = mean + np.sqrt(fld.model.sill) if upper is None else upper - lower = mean - np.sqrt(fld.model.sill) if lower is None else lower - kw = dict( - values=[lower, upper], - thresholds=[divide], - ) - return apply_function( - fld=fld, - function=array_discrete, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def discrete( - fld, - values, - thresholds="arithmetic", - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Discrete transformation. - - After this transformation, the field has only `len(values)` discrete - values. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - values : :any:`numpy.ndarray` - The discrete values the field will take - thresholds : :class:`str` or :any:`numpy.ndarray`, optional - the thresholds, where the value classes are separated - possible values are: - * "arithmetic": the mean of the 2 neighbouring values - * "equal": divide the field into equal parts - * an array of explicitly given thresholds - Default: "arithmetic" - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not process and thresholds == "equal": - _check_for_default_normal(fld) - kw = dict( - values=values, - thresholds=thresholds, - mean=0.0 if process and not keep_mean else fld.mean, - var=fld.model.sill, - ) - return apply_function( - fld=fld, - function=array_discrete, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def boxcox( - fld, - lmbda=1, - shift=0, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - (Inverse) Box-Cox transformation to denormalize data. - - After this transformation, the again Box-Cox transformed field is normal - distributed. - - See: https://en.wikipedia.org/wiki/Power_transform#Box%E2%80%93Cox_transformation - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - lmbda : :class:`float`, optional - The lambda parameter of the Box-Cox transformation. - For ``lmbda=0`` one obtains the log-normal transformation. - Default: ``1`` - shift : :class:`float`, optional - The shift parameter from the two-parametric Box-Cox transformation. - The field will be shifted by that value before transformation. - Default: ``0`` - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. 
- - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - kw = dict(lmbda=lmbda, shift=shift) - return apply_function( - fld=fld, - function=array_boxcox, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def zinnharvey( - fld, - conn="high", - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Zinn and Harvey transformation to connect low or high values. - - After this transformation, the field is still normal distributed. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - conn : :class:`str`, optional - Desired connectivity. Either "low" or "high". - Default: "high" - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not process: - _check_for_default_normal(fld) - kw = dict( - conn=conn, - mean=0.0 if process and not keep_mean else fld.mean, - var=fld.model.sill, - ) - return apply_function( - fld=fld, - function=array_zinnharvey, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def normal_force_moments( - fld, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Force moments of a normal distributed field. - - After this transformation, the field is still normal distributed. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not process: - _check_for_default_normal(fld) - kw = dict( - mean=0.0 if process and not keep_mean else fld.mean, var=fld.model.sill - ) - return apply_function( - fld=fld, - function=array_force_moments, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def normal_to_lognormal( - fld, field="field", store=True, process=False, keep_mean=True -): - """ - Transform normal distribution to log-normal distribution. - - After this transformation, the field is log-normal distributed. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. 
- keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - return apply_function( - fld=fld, - function=array_to_lognormal, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - ) - - -def normal_to_uniform( - fld, - low=0.0, - high=1.0, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Transform normal distribution to uniform distribution on [0, 1]. - - After this transformation, the field is uniformly distributed on [0, 1]. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - low : :class:`float`, optional - Lower bound for the uniform distribution. - Default: 0.0 - high : :class:`float`, optional - Upper bound for the uniform distribution. - Default: 1.0 - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - """ - if not process: - _check_for_default_normal(fld) - kw = dict( - mean=0.0 if process and not keep_mean else fld.mean, - var=fld.model.sill, - low=low, - high=high, - ) - return apply_function( - fld=fld, - function=array_to_uniform, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def normal_to_arcsin( - fld, - a=None, - b=None, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Transform normal distribution to the bimodal arcsin distribution. - - See: https://en.wikipedia.org/wiki/Arcsine_distribution - - After this transformation, the field is arcsin-distributed on [a, b]. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - a : :class:`float`, optional - Parameter a of the arcsin distribution (lower bound). - Default: keep mean and variance - b : :class:`float`, optional - Parameter b of the arcsin distribution (upper bound). - Default: keep mean and variance - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not process: - _check_for_default_normal(fld) - kw = dict( - mean=0.0 if process and not keep_mean else fld.mean, - var=fld.model.sill, - a=a, - b=b, - ) - return apply_function( - fld=fld, - function=array_to_arcsin, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) - - -def normal_to_uquad( - fld, - a=None, - b=None, - field="field", - store=True, - process=False, - keep_mean=True, -): - """ - Transform normal distribution to U-quadratic distribution. 
- - See: https://en.wikipedia.org/wiki/U-quadratic_distribution - - After this transformation, the field is U-quadratic-distributed on [a, b]. - - Parameters - ---------- - fld : :any:`Field` - Field class containing a generated field. - a : :class:`float`, optional - Parameter a of the U-quadratic distribution (lower bound). - Default: keep mean and variance - b : :class:`float`, optional - Parameter b of the U-quadratic distribution (upper bound). - Default: keep mean and variance - field : :class:`str`, optional - Name of field to be transformed. The default is "field". - store : :class:`str` or :class:`bool`, optional - Whether to store field inplace (True/False) or under a given name. - The default is True. - process : :class:`bool`, optional - Whether to process in/out fields with trend, normalizer and mean - of given Field instance. The default is False. - keep_mean : :class:`bool`, optional - Whether to keep the mean of the field if process=True. - The default is True. - - Returns - ------- - :class:`numpy.ndarray` - Transformed field. - """ - if not process: - _check_for_default_normal(fld) - kw = dict( - mean=0.0 if process and not keep_mean else fld.mean, - var=fld.model.sill, - a=a, - b=b, - ) - return apply_function( - fld=fld, - function=array_to_uquad, - field=field, - store=store, - process=process, - keep_mean=keep_mean, - **kw, - ) diff --git a/src/gstools_cython/variogram/__init__.py b/src/gstools_cython/variogram/__init__.py deleted file mode 100644 index d8a5b238..00000000 --- a/src/gstools_cython/variogram/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -""" -GStools subpackage providing tools for estimating and fitting variograms. - -.. currentmodule:: gstools.variogram - -Variogram estimation -^^^^^^^^^^^^^^^^^^^^ - -.. autosummary:: - :toctree: - - vario_estimate - vario_estimate_axis - -Binning -^^^^^^^ - -.. autosummary:: - :toctree: - - standard_bins - ----- -""" - -from gstools.variogram.binning import standard_bins -from gstools.variogram.variogram import ( - vario_estimate, - vario_estimate_axis, - vario_estimate_structured, - vario_estimate_unstructured, -) - -__all__ = [ - "vario_estimate", - "vario_estimate_axis", - "vario_estimate_unstructured", - "vario_estimate_structured", - "standard_bins", -] diff --git a/src/gstools_cython/variogram/binning.py b/src/gstools_cython/variogram/binning.py deleted file mode 100644 index 86d4fdc2..00000000 --- a/src/gstools_cython/variogram/binning.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -GStools subpackage providing binning routines. - -.. currentmodule:: gstools.variogram.binning - -The following functions are provided - -.. autosummary:: - standard_bins -""" - -import numpy as np - -from gstools.tools import RADIAN_SCALE -from gstools.tools.geometric import ( - chordal_to_great_circle, - format_struct_pos_dim, - generate_grid, - latlon2pos, -) - -__all__ = ["standard_bins"] - - -def _sturges(pnt_cnt): - return int(np.ceil(2 * np.log2(pnt_cnt) + 1)) - - -def standard_bins( - pos=None, - dim=2, - latlon=False, - mesh_type="unstructured", - bin_no=None, - max_dist=None, - geo_scale=RADIAN_SCALE, -): - r""" - Get standard binning. - - Parameters - ---------- - pos : :class:`list`, optional - the position tuple, containing either the point coordinates (x, y, ...) - or the axes descriptions (for mesh_type='structured') - dim : :class:`int`, optional - Field dimension. - latlon : :class:`bool`, optional - Whether the data is representing 2D fields on earths surface described - by latitude and longitude. 
When using this, the estimator will - use great-circle distance for variogram estimation. - Note, that only an isotropic variogram can be estimated and a - ValueError will be raised, if a direction was specified. - Bin edges need to be given in radians in this case. - Default: False - mesh_type : :class:`str`, optional - 'structured' / 'unstructured', indicates whether the pos tuple - describes the axis or the point coordinates. - Default: `'unstructured'` - bin_no: :class:`int`, optional - number of bins to create. If None is given, will be determined by - Sturges' rule from the number of points. - Default: None - max_dist: :class:`float`, optional - Cut of length for the bins. If None is given, it will be set to one - third of the box-diameter from the given points. - Default: None - geo_scale : :class:`float`, optional - Geographic unit scaling in case of latlon coordinates to get a - meaningful bins unit. - By default, bins are assumed to be in radians with latlon=True. - Can be set to :any:`KM_SCALE` to have bins in km or - :any:`DEGREE_SCALE` to have bins in degrees. - Default: :any:`RADIAN_SCALE` - - Returns - ------- - :class:`numpy.ndarray` - The generated bin edges. - - Notes - ----- - Internally uses double precision and also returns doubles. - """ - dim = 2 if latlon else int(dim) - if bin_no is None or max_dist is None: - if pos is None: - raise ValueError("standard_bins: no pos tuple given.") - if mesh_type != "unstructured": - pos = generate_grid(format_struct_pos_dim(pos, dim)[0]) - else: - pos = np.asarray(pos, dtype=np.double).reshape(dim, -1) - pos = latlon2pos(pos, radius=geo_scale) if latlon else pos - pnt_cnt = len(pos[0]) - box = [] - for axis in pos: - box.append([np.min(axis), np.max(axis)]) - box = np.asarray(box) - diam = np.linalg.norm(box[:, 0] - box[:, 1]) - # convert diameter to great-circle distance if using latlon - diam = chordal_to_great_circle(diam, geo_scale) if latlon else diam - bin_no = _sturges(pnt_cnt) if bin_no is None else int(bin_no) - max_dist = diam / 3 if max_dist is None else float(max_dist) - return np.linspace(0, max_dist, num=bin_no + 1, dtype=np.double) diff --git a/src/gstools_cython/variogram/variogram.py b/src/gstools_cython/variogram/variogram.py deleted file mode 100644 index afcf336f..00000000 --- a/src/gstools_cython/variogram/variogram.py +++ /dev/null @@ -1,499 +0,0 @@ -""" -GStools subpackage providing tools for estimating and fitting variograms. - -.. currentmodule:: gstools.variogram.variogram - -The following functions are provided - -.. 
autosummary:: - vario_estimate - vario_estimate_axis -""" - -# pylint: disable=C0412 -import numpy as np - -from gstools import config -from gstools.normalizer.tools import remove_trend_norm_mean -from gstools.tools import RADIAN_SCALE -from gstools.tools.geometric import ( - ang2dir, - format_struct_pos_shape, - format_unstruct_pos_shape, - generate_grid, -) -from gstools.variogram.binning import standard_bins - -if config.USE_RUST: # pragma: no cover - # pylint: disable=E0401 - from gstools_core import variogram_directional as directional - from gstools_core import variogram_ma_structured as ma_structured - from gstools_core import variogram_structured as structured - from gstools_core import variogram_unstructured as unstructured -else: - from gstools.variogram.estimator import ( - directional, - ma_structured, - structured, - unstructured, - ) - -__all__ = [ - "vario_estimate", - "vario_estimate_axis", - "vario_estimate_unstructured", - "vario_estimate_structured", -] - - -AXIS = ["x", "y", "z"] -AXIS_DIR = {"x": 0, "y": 1, "z": 2} - - -def _set_estimator(estimator): - """Translate the verbose Python estimator identifier to single char.""" - if estimator.lower() == "matheron": - cython_estimator = "m" - elif estimator.lower() == "cressie": - cython_estimator = "c" - else: - raise ValueError(f"Unknown variogram estimator function: {estimator}") - return cython_estimator - - -def _separate_dirs_test(direction, angles_tol): - """Check if given directions are separated.""" - if direction is None or direction.shape[0] < 2: - return True - separate_dirs = True - for i in range(direction.shape[0] - 1): - for j in range(i + 1, direction.shape[0]): - s_prod = np.minimum(np.abs(np.dot(direction[i], direction[j])), 1) - separate_dirs &= np.arccos(s_prod) >= 2 * angles_tol - # gstools-core doesn't like the type `numpy.bool_` - return bool(separate_dirs) - - -def vario_estimate( - pos, - field, - bin_edges=None, - sampling_size=None, - sampling_seed=None, - estimator="matheron", - latlon=False, - direction=None, - angles=None, - angles_tol=np.pi / 8, - bandwidth=None, - no_data=np.nan, - mask=np.ma.nomask, - mesh_type="unstructured", - return_counts=False, - mean=None, - normalizer=None, - trend=None, - fit_normalizer=False, - geo_scale=RADIAN_SCALE, - **std_bins, -): - r""" - Estimates the empirical variogram. - - The algorithm calculates following equation: - - .. math:: - \gamma(r_k) = \frac{1}{2 N(r_k)} \sum_{i=1}^{N(r_k)} (z(\mathbf x_i) - - z(\mathbf x_i'))^2 \; , - - with :math:`r_k \leq \| \mathbf x_i - \mathbf x_i' \| < r_{k+1}` - being the bins. - - Or if the estimator "cressie" was chosen: - - .. math:: - \gamma(r_k) = \frac{\frac{1}{2}\left(\frac{1}{N(r_k)}\sum_{i=1}^{N(r_k)} - \left|z(\mathbf x_i) - z(\mathbf x_i')\right|^{0.5}\right)^4} - {0.457 + 0.494 / N(r_k) + 0.045 / N^2(r_k)} \; , - - with :math:`r_k \leq \| \mathbf x_i - \mathbf x_i' \| < r_{k+1}` - being the bins. - The Cressie estimator is more robust to outliers [Webster2007]_. - - By providing `direction` vector[s] or angles, a directional variogram - can be calculated. If multiple directions are given, a set of variograms - will be returned. - Directional bining is controlled by a given angle tolerance (`angles_tol`) - and an optional `bandwidth`, that truncates the width of the search band - around the given direction[s]. - - To reduce the calculation time, `sampling_size` could be passed to sample - down the number of field points. 
- - Parameters - ---------- - pos : :class:`list` - the position tuple, containing either the point coordinates (x, y, ...) - or the axes descriptions (for mesh_type='structured') - field : :class:`numpy.ndarray` or :class:`list` of :class:`numpy.ndarray` - The spatially distributed data. - Can also be of type :class:`numpy.ma.MaskedArray` to use masked values. - You can pass a list of fields, that will be used simultaneously. - This could be helpful, when there are multiple realizations at the - same points, with the same statistical properties. - bin_edges : :class:`numpy.ndarray`, optional - the bins on which the variogram will be calculated. - If :any:`None` are given, standard bins provided by the - :any:`standard_bins` routine will be used. Default: :any:`None` - sampling_size : :class:`int` or :any:`None`, optional - for large input data, this method can take a long - time to compute the variogram, therefore this argument specifies - the number of data points to sample randomly - Default: :any:`None` - sampling_seed : :class:`int` or :any:`None`, optional - seed for samples if sampling_size is given. - Default: :any:`None` - estimator : :class:`str`, optional - the estimator function, possible choices: - - * "matheron": the standard method of moments of Matheron - * "cressie": an estimator more robust to outliers - - Default: "matheron" - latlon : :class:`bool`, optional - Whether the data is representing 2D fields on earths surface described - by latitude and longitude. When using this, the estimator will - use great-circle distance for variogram estimation. - Note, that only an isotropic variogram can be estimated and a - ValueError will be raised, if a direction was specified. - Bin edges need to be given in radians in this case. - Default: False - direction : :class:`list` of :class:`numpy.ndarray`, optional - directions to evaluate a directional variogram. - Angular tolerance is given by `angles_tol`. - bandwidth to cut off how wide the search for point pairs should be - is given by `bandwidth`. - You can provide multiple directions at once to get one variogram - for each direction. - For a single direction you can also use the `angles` parameter, - to provide the direction by its spherical coordinates. - Default: :any:`None` - angles : :class:`numpy.ndarray`, optional - the angles of the main axis to calculate the variogram for in radians - angle definitions from ISO standard 80000-2:2009 - for 1d this parameter will have no effect at all - for 2d supply one angle which is - azimuth :math:`\varphi` (ccw from +x in xy plane) - for 3d supply two angles which are - azimuth :math:`\varphi` (ccw from +x in xy plane) - and inclination :math:`\theta` (cw from +z). - Can be used instead of direction. - Default: :any:`None` - angles_tol : class:`float`, optional - the tolerance around the variogram angle to count a point as being - within this direction from another point (the angular tolerance around - the directional vector given by angles) - Default: `np.pi/8` = 22.5° - bandwidth : class:`float`, optional - bandwidth to cut off the angular tolerance for directional variograms. - If None is given, only the `angles_tol` parameter will control the - point selection. - Default: :any:`None` - no_data : :class:`float`, optional - Value to identify missing data in the given field. - Default: `numpy.nan` - mask : :class:`numpy.ndarray` of :class:`bool`, optional - Mask to deselect data in the given field. 
- Default: :any:`numpy.ma.nomask` - mesh_type : :class:`str`, optional - 'structured' / 'unstructured', indicates whether the pos tuple - describes the axis or the point coordinates. - Default: `'unstructured'` - return_counts: :class:`bool`, optional - if set to true, this function will also return the number of data - points found at each lag distance as a third return value - Default: False - mean : :class:`float`, optional - mean value used to shift normalized input data. - Can also be a callable. The default is None. - normalizer : :any:`None` or :any:`Normalizer`, optional - Normalizer to be applied to the input data to gain normality. - The default is None. - trend : :any:`None` or :class:`float` or :any:`callable`, optional - A callable trend function. Should have the signature: f(x, [y, z, ...]) - If no normalizer is applied, this behaves equal to 'mean'. - The default is None. - fit_normalizer : :class:`bool`, optional - Whether to fit the data-normalizer to the given (detrended) field. - Default: False - geo_scale : :class:`float`, optional - Geographic unit scaling in case of latlon coordinates to get a - meaningful bins unit. - By default, bins are assumed to be in radians with latlon=True. - Can be set to :any:`KM_SCALE` to have bins in km or - :any:`DEGREE_SCALE` to have bins in degrees. - Default: :any:`RADIAN_SCALE` - **std_bins - Optional arguments that are forwarded to the :any:`standard_bins` routine - if no bins are given (bin_no, max_dist). - - Returns - ------- - bin_centers : (n), :class:`numpy.ndarray` - The bin centers. - gamma : (n) or (d, n), :class:`numpy.ndarray` - The estimated variogram values at bin centers. - Is stacked if multiple `directions` (d>1) are given. - counts : (n) or (d, n), :class:`numpy.ndarray`, optional - The number of point pairs found for each bin. - Is stacked if multiple `directions` (d>1) are given. - Only provided if `return_counts` is True. - normalizer : :any:`Normalizer`, optional - The fitted normalizer for the given data. - Only provided if `fit_normalizer` is True. - - Notes - ----- - Internally uses double precision and also returns doubles. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. 
(2007) - """ - if bin_edges is not None: - bin_edges = np.atleast_1d(np.asarray(bin_edges, dtype=np.double)) - bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2.0 - # allow multiple fields at same positions (ndmin=2: first axis -> field ID) - # need to convert to ma.array, since list of ma.array is not recognised - field = np.ma.array(field, ndmin=2, dtype=np.double, copy=True) - masked = np.ma.is_masked(field) or np.any(mask) - # catch special case if everything is masked - if masked and np.all(mask): - bin_centers = np.empty(0) if bin_edges is None else bin_centers - estimates = np.zeros_like(bin_centers) - if return_counts: - return bin_centers, estimates, np.zeros_like(estimates, dtype=int) - return bin_centers, estimates - if not masked: - field = field.filled() - # check mesh shape - if mesh_type != "unstructured": - pos, __, dim = format_struct_pos_shape( - pos, field.shape, check_stacked_shape=True - ) - pos = generate_grid(pos) - else: - pos, __, dim = format_unstruct_pos_shape( - pos, field.shape, check_stacked_shape=True - ) - if latlon and dim != 2: - raise ValueError("Variogram: given field needs to be 2D for lat-lon.") - # prepare the field - pnt_cnt = len(pos[0]) - field = field.reshape((-1, pnt_cnt)) - # apply mask if wanted - if masked: - # if fields have different masks, take the minimal common mask - # given mask will be applied in addition - # selected region is the inverted masked (unmasked values) - if np.size(mask) > 1: # not only np.ma.nomask - select = np.invert( - np.logical_or( - np.reshape(mask, pnt_cnt), np.all(field.mask, axis=0) - ) - ) - else: - select = np.invert(np.all(field.mask, axis=0)) - pos = pos[:, select] - field.fill_value = np.nan # use no-data val. for remaining masked vals - field = field[:, select].filled() # convert to ndarray - select = mask = None # free space - # set no_data values - if not np.isnan(no_data): - field[np.isclose(field, float(no_data))] = np.nan - # set directions - dir_no = 0 - if direction is not None and dim > 1: - direction = np.atleast_2d(np.asarray(direction, dtype=np.double)) - if len(direction.shape) > 2: - raise ValueError(f"Can't interpret directions: {direction}") - if direction.shape[1] != dim: - raise ValueError(f"Can't interpret directions: {direction}") - dir_no = direction.shape[0] - # convert given angles to direction vector - if angles is not None and direction is None and dim > 1: - direction = ang2dir(angles=angles, dtype=np.double, dim=dim) - dir_no = direction.shape[0] - # prepare directional variogram - if dir_no > 0: - if latlon: - raise ValueError("Directional variogram not allowed for lat-lon.") - norms = np.linalg.norm(direction, axis=1) - if np.any(np.isclose(norms, 0)): - raise ValueError(f"Zero length directions: {direction}") - # only unit-vectors for directions - direction = np.divide(direction, norms[:, np.newaxis]) - # negative bandwidth to turn it off - bandwidth = float(bandwidth) if bandwidth is not None else -1.0 - angles_tol = float(angles_tol) - # prepare sampled variogram - if sampling_size is not None and sampling_size < pnt_cnt: - sampled_idx = np.random.RandomState(sampling_seed).choice( - np.arange(pnt_cnt), sampling_size, replace=False - ) - field = field[:, sampled_idx] - pos = pos[:, sampled_idx] - # create bins - if bin_edges is None: - bin_edges = standard_bins( - pos, dim, latlon, geo_scale=geo_scale, **std_bins - ) - bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2.0 - if latlon: - # internally we always use radians - bin_edges /= geo_scale - # normalize field - 
norm_field_out = remove_trend_norm_mean( - *(pos, field, mean, normalizer, trend), - check_shape=False, - stacked=True, - fit_normalizer=fit_normalizer, - ) - field = norm_field_out[0] if fit_normalizer else norm_field_out - norm_out = (norm_field_out[1],) if fit_normalizer else () - # select variogram estimator - cython_estimator = _set_estimator(estimator) - # run - if dir_no == 0: - # "h"aversine or "e"uclidean distance type - distance_type = "h" if latlon else "e" - estimates, counts = unstructured( - field, - bin_edges, - pos, - estimator_type=cython_estimator, - distance_type=distance_type, - num_threads=config.NUM_THREADS, - ) - else: - estimates, counts = directional( - field, - bin_edges, - pos, - direction, - angles_tol, - bandwidth, - separate_dirs=_separate_dirs_test(direction, angles_tol), - estimator_type=cython_estimator, - num_threads=config.NUM_THREADS, - ) - if dir_no == 1: - estimates, counts = estimates[0], counts[0] - est_out = (estimates, counts) - return (bin_centers,) + est_out[: 2 if return_counts else 1] + norm_out - - -def vario_estimate_axis( - field, direction="x", estimator="matheron", no_data=np.nan -): - r"""Estimates the variogram along array axis. - - The indices of the given direction are used for the bins. - Uniform spacings along the given axis are assumed. - - The algorithm calculates following equation: - - .. math:: - \gamma(r_k) = \frac{1}{2 N(r_k)} \sum_{i=1}^{N(r_k)} (z(\mathbf x_i) - - z(\mathbf x_i'))^2 \; , - - with :math:`r_k \leq \| \mathbf x_i - \mathbf x_i' \| < r_{k+1}` - being the bins. - - Or if the estimator "cressie" was chosen: - - .. math:: - \gamma(r_k) = \frac{\frac{1}{2}\left(\frac{1}{N(r_k)}\sum_{i=1}^{N(r_k)} - \left|z(\mathbf x_i) - z(\mathbf x_i')\right|^{0.5}\right)^4} - {0.457 + 0.494 / N(r_k) + 0.045 / N^2(r_k)} \; , - - with :math:`r_k \leq \| \mathbf x_i - \mathbf x_i' \| < r_{k+1}` - being the bins. - The Cressie estimator is more robust to outliers [Webster2007]_. - - Parameters - ---------- - field : :class:`numpy.ndarray` or :class:`numpy.ma.MaskedArray` - the spatially distributed data (can be masked) - direction : :class:`str` or :class:`int` - the axis over which the variogram will be estimated (x, y, z) - or (0, 1, 2, ...) - estimator : :class:`str`, optional - the estimator function, possible choices: - - * "matheron": the standard method of moments of Matheron - * "cressie": an estimator more robust to outliers - - Default: "matheron" - - no_data : :class:`float`, optional - Value to identify missing data in the given field. - Default: `numpy.nan` - - Returns - ------- - :class:`numpy.ndarray` - the estimated variogram along the given direction. - - Warnings - -------- - It is assumed that the field is defined on an equidistant Cartesian grid. - - Notes - ----- - Internally uses double precision and also returns doubles. - - References - ---------- - .. [Webster2007] Webster, R. and Oliver, M. A. - "Geostatistics for environmental scientists.", - John Wiley & Sons. 
(2007) - """ - missing_mask = ( - np.isnan(field) if np.isnan(no_data) else np.isclose(field, no_data) - ) - missing = np.any(missing_mask) - masked = np.ma.is_masked(field) or missing - if masked: - field = np.ma.array(field, ndmin=1, dtype=np.double) - if missing: - field.mask = np.logical_or(field.mask, missing_mask) - mask = np.ma.getmaskarray(field) - if not config.USE_RUST: - mask = np.asarray(mask, dtype=np.int32) - else: - field = np.atleast_1d(np.asarray(field, dtype=np.double)) - missing_mask = None # free space - - axis_to_swap = AXIS_DIR[direction] if direction in AXIS else int(direction) - # desired axis first, convert to 2D array afterwards - field = field.swapaxes(0, axis_to_swap) - field = field.reshape((field.shape[0], -1)) - if masked: - mask = mask.swapaxes(0, axis_to_swap) - mask = mask.reshape((mask.shape[0], -1)) - - cython_estimator = _set_estimator(estimator) - - if masked: - return ma_structured( - field, mask, cython_estimator, num_threads=config.NUM_THREADS - ) - return structured(field, cython_estimator, num_threads=config.NUM_THREADS) - - -# for backward compatibility -vario_estimate_unstructured = vario_estimate -vario_estimate_structured = vario_estimate_axis From 6abed37ef4416928102a7f4310b7a16606b016ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Thu, 11 Jul 2024 23:56:24 +0200 Subject: [PATCH 04/26] remove gstools content --- .github/workflows/main.yml | 8 +- .gitignore | 2 +- CHANGELOG.md | 460 +-------------------------------- MANIFEST.in | 2 +- README.md | 288 +-------------------- docs/source/api.rst | 8 +- docs/source/conf.py | 6 +- pyproject.toml | 28 +- setup.py | 12 +- src/gstools_cython/__init__.py | 228 +--------------- 10 files changed, 45 insertions(+), 997 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7d1c7d01..ceb1dade 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -46,11 +46,11 @@ jobs: - name: pylint check run: | - python -m pylint src/gstools/ + python -m pylint src/gstools_cython/ - name: cython-lint check run: | - cython-lint src/gstools/ + cython-lint src/gstools_cython/ build_wheels: name: wheels for ${{ matrix.os }} @@ -112,7 +112,7 @@ jobs: python -m pip install --upgrade pip pip install build "coveralls>=3.0.0" - - name: Install GSTools + - name: Install GSTools-Cython env: GSTOOLS_BUILD_PARALLEL: 1 run: | @@ -124,7 +124,7 @@ jobs: run: | pip install "numpy${{ matrix.ver.np }}" "scipy${{ matrix.ver.sp }}" python -m pytest --cov gstools --cov-report term-missing -v tests/ - python -m coveralls --service=github + # python -m coveralls --service=github - name: Build sdist run: | diff --git a/.gitignore b/.gitignore index bcdc980b..5334b8ef 100644 --- a/.gitignore +++ b/.gitignore @@ -112,7 +112,7 @@ info/ *.cpp # generated version file -src/gstools/_version.py +src/gstools_cython/_version.py # generated docs docs/source/examples/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 20fb771b..b1c868b8 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,462 +1,14 @@ # Changelog -All notable changes to **GSTools** will be documented in this file. +All notable changes to **GSTools-Cython** will be documented in this file. 
-## [1.5.2] - Nifty Neon - 2024-05 +## [1.0.0] - 2024-07 -### Enhancements - -- added global variable `config.NUM_THREADS` to select number of threads for parallel computation ([#336](https://github.com/GeoStat-Framework/GSTools/pull/336)) -- speed up sampling with emcee by setting `vectorize=True` in `EnsembleSampler` ([#346](https://github.com/GeoStat-Framework/GSTools/pull/346)) -- prepare numpy 2 support ([#340](https://github.com/GeoStat-Framework/GSTools/pull/340)) - - at least numpy 2.0.0rc1 for building extensions (for Python 3.9 and above) - - check multiple numpy and scipy versions in CI - - fixed minimal versions for numpy - - use `np.asarray` everywhere with `np.atleast_(n)d` - - fix long/longlong integer issue in cython on windows by always using 64bit integers - -### Bugfixes -- build docs with latest sphinx version ([#340](https://github.com/GeoStat-Framework/GSTools/pull/340)) -- fixed zero division error in spectral density of Integral model ([#347](https://github.com/GeoStat-Framework/GSTools/pull/347)) -- minor pylint fixes for used-before-assignment issues ([#350](https://github.com/GeoStat-Framework/GSTools/pull/350)) - -### Changes -- require pyvista 0.40 at least ([#340](https://github.com/GeoStat-Framework/GSTools/pull/340)) -- require matplotlib 3.7 at least ([#350](https://github.com/GeoStat-Framework/GSTools/pull/350)) -- remove universal2 wheels for macos (we already provide separate intel and arm64 wheels) ([#350](https://github.com/GeoStat-Framework/GSTools/pull/350)) - - -## [1.5.1] - Nifty Neon - 2023-11 - -### Enhancements - -see [#317](https://github.com/GeoStat-Framework/GSTools/pull/317) - -- added wheels for Python 3.12 -- dropped support for Python 3.7 (EOL) -- linted Cython files with cython-lint -- use Cython 3 to build extensions - - -## [1.5.0] - Nifty Neon - 2023-06 - -### Enhancements -- added `temporal` flag to `CovModel` to explicitly specify spatio-temporal models [#308](https://github.com/GeoStat-Framework/GSTools/pull/308) - - rotation between spatial and temporal dimension will be ignored - - added `spatial_dim` to `CovModel` to explicitly set spatial dimension for spatio-temporal models - - if not using `spatial_dim`, the provided `dim` needs to include the possible temporal dimension - - `spatial_dim` is always one less than `field_dim` for spatio-temporal models - - also works with `latlon=True` to have a spatio-temporal model with geographic coordinates - - all plotting routines respect this - - the `Field` class now has a `temporal` attribute which forwards the model attribute - - automatic variogram fitting in kriging classes for `temporal=True` and `latlon=True` will raise an error -- added `geo_scale` to `CovModel` to have a more consistent way to set the units of the model length scale for geographic coordinates [#308](https://github.com/GeoStat-Framework/GSTools/pull/308) - - no need to use `rescale` for this anymore (was rather a hack) - - added `gs.KM_SCALE` which is the same as `gs.EARTH_RADIUS` for kilometer scaling - - added `gs.DEGREE_SCALE` for great circle distance in degrees - - added `gs.RADIAN_SCALE` for great circle distance in radians (default and previous behavior) - - yadrenko variogram respects this and assumes the great circle distances is given in the respective unit - - `vario_estimate` also has `geo_scale` now to control the units of the bins -- `vario_estimate` now forwards additional kwargs to `standard_bins` (`bin_no`, `max_dist`) [#308](https://github.com/GeoStat-Framework/GSTools/pull/308) -- added `low` 
and `high` arguments to `uniform` transformation [#310](https://github.com/GeoStat-Framework/GSTools/pull/310) - -### Changes -- `CovModel`s expect special arguments by keyword now [#308](https://github.com/GeoStat-Framework/GSTools/pull/308) -- always use f-strings internally [#283](https://github.com/GeoStat-Framework/GSTools/pull/283) -- removed `verbose` attribute from `RandMeth` classes [#309](https://github.com/GeoStat-Framework/GSTools/pull/309) -- all arguments for `RandMeth` classes key-word-only now except `model` [#309](https://github.com/GeoStat-Framework/GSTools/pull/309) -- rename "package" to "api" in doc structure [#290](https://github.com/GeoStat-Framework/GSTools/pull/290) - -### Bugfixes -- latex equations were not rendered correctly in docs [#290](https://github.com/GeoStat-Framework/GSTools/pull/290) - - -## [1.4.1] - Sassy Sapphire - 2022-11 - -### Enhancements -- new (Exponential-) Integral model added [#243](https://github.com/GeoStat-Framework/GSTools/pull/243) -- added wheels for Python 3.11 [#272](https://github.com/GeoStat-Framework/GSTools/pull/272) - -### Changes -- API documentation is polished and fully auto-generated now [#271](https://github.com/GeoStat-Framework/GSTools/pull/271) - -### Bugfixes -- fixed approximation of `Matern.spectrum` for big `nu` [#243](https://github.com/GeoStat-Framework/GSTools/pull/243) -- GSTools had wrong version when installed from git archive [#272](https://github.com/GeoStat-Framework/GSTools/pull/272) -- Field.plot: solve long-standing mpl slider bug [#273](https://github.com/GeoStat-Framework/GSTools/pull/273) - - -## [1.4.0] - Sassy Sapphire - 2022-08 - -### Enhancements -- added Youtube tutorial to documentation [#239](https://github.com/GeoStat-Framework/GSTools/pull/239) -- better support for custom generators [#250](https://github.com/GeoStat-Framework/GSTools/pull/250) [#259](https://github.com/GeoStat-Framework/GSTools/pull/259) -- add `valid_value_types` class variable to all field classes [#250](https://github.com/GeoStat-Framework/GSTools/pull/250) -- PyKrige: fix passed variogram in case of latlon models [#254](https://github.com/GeoStat-Framework/GSTools/pull/254) -- add bounds checks for optional arguments of `CovModel` when resetting by class attribute [#255](https://github.com/GeoStat-Framework/GSTools/pull/255) -- minor coverage improvements [#255](https://github.com/GeoStat-Framework/GSTools/pull/255) -- documentation: readability improvements [#257](https://github.com/GeoStat-Framework/GSTools/pull/257) - -### Changes -- drop Python 3.6 support (setuptools>60 needs py>3.7) [#241](https://github.com/GeoStat-Framework/GSTools/pull/241) -- move `setup.cfg` content to `pyproject.toml` ([PEP 621](https://peps.python.org/pep-0621/)) [#241](https://github.com/GeoStat-Framework/GSTools/pull/241) -- move to `src/` based package structure (better testing, building and structure) [#241](https://github.com/GeoStat-Framework/GSTools/pull/241) -- use [extension-helpers](https://pypi.org/project/extension-helpers/) for openmp support in `setup.py` [#241](https://github.com/GeoStat-Framework/GSTools/pull/241) -- increase minimal version of meshio to v5.1 [#241](https://github.com/GeoStat-Framework/GSTools/pull/241) - -### Bugfixes -- Pyvista v0.32 deprecation warning: use point_data instead of point_arrays [#237](https://github.com/GeoStat-Framework/GSTools/pull/237) -- remove deprecated scipy (v1.9) method pinv2 [#247](https://github.com/GeoStat-Framework/GSTools/pull/247) -- change float comparison in tests 
[#248](https://github.com/GeoStat-Framework/GSTools/pull/248) -- Cython: solve `-Wsometimes-uninitialized` warning [#255](https://github.com/GeoStat-Framework/GSTools/pull/255) - - -## [1.3.5] - Pure Pink - 2022-01 - -### Changes -- remove caps for dependencies [#229](https://github.com/GeoStat-Framework/GSTools/pull/229) -- build linux wheels with manylinux2014 for all versions ([CIBW v2.3.1](https://github.com/pypa/cibuildwheel/releases/tag/v2.3.1)) [#227](https://github.com/GeoStat-Framework/GSTools/pull/227) - -### Bugfixes -- `Field.mesh` was not compatible with [meshio](https://github.com/nschloe/meshio) v5.1+ [#227](https://github.com/GeoStat-Framework/GSTools/pull/227) - - -## [1.3.4] - Pure Pink - 2021-11 - -### Enhancements -- add GStools-Core as optional dependency [#215](https://github.com/GeoStat-Framework/GSTools/pull/215) -- provide wheels for Python 3.10 [#211](https://github.com/GeoStat-Framework/GSTools/pull/211) -- provide macOS wheels for Apple Silicon [#211](https://github.com/GeoStat-Framework/GSTools/pull/211) - -### Changes -- remove unnecessary `dim` argument in Cython code [#216](https://github.com/GeoStat-Framework/GSTools/issues/216) - - -## [1.3.3] - Pure Pink - 2021-08 - -### Enhancements -See: [#197](https://github.com/GeoStat-Framework/GSTools/issues/197) -- `gstools.transform`: - - add keywords `field`, `store`, `process` and `keep_mean` to all transformations to control storage and respect `normalizer` - - added `apply_function` transformation - - added `apply` as wrapper for all transformations - - added `transform` method to all `Field` (sub)classes as interface to `transform.apply` - - added checks for normal fields to work smoothly with recently added `normalizer` submodule -- `Field`: - - allow naming fields when generating and control storage with `store` keyword - - all subclasses now have the `post_process` keyword (apply mean, normalizer, trend) - - added subscription to access fields by name (`Field["field"]`) - - added `set_pos` method to set position tuple - - allow reusing present `pos` tuple - - added `pos`, `mesh_type`, `field_names`, `field_shape`, `all_fields` properties -- `CondSRF`: - - memory optimization by forwarding `pos` from underlying `krige` instance - - only recalculate kriging field if `pos` tuple changed (optimized ensemble generation) -- performance improvement by using `np.asarray` instead of `np.array` where possible -- updated examples to use new features -- added incomplete lower gamma function `inc_gamma_low` (for TPLGaussian spectral density) -- filter `nan` values from `cond_val` array in all kriging routines [#201](https://github.com/GeoStat-Framework/GSTools/issues/201) - -### Bugfixes -- `inc_gamma` was defined wrong for integer `s < 0` - - -## [1.3.2] - Pure Pink - 2021-07 - -### Bugfixes -- `vario_estimate` was altering the input field under certain circumstances [#180](https://github.com/GeoStat-Framework/GSTools/issues/180) -- `emcee` v3.1 now requires `nsteps` in `run_mcmc()` to be integer (called in `RNG.sample_ln_pdf`) [#184](https://github.com/GeoStat-Framework/GSTools/pull/184) - - -## [1.3.1] - Pure Pink - 2021-06 - -### Enhancements -- Standalone use of Field class [#166](https://github.com/GeoStat-Framework/GSTools/issues/166) -- add social badges in README [#169](https://github.com/GeoStat-Framework/GSTools/issues/169), [#170](https://github.com/GeoStat-Framework/GSTools/issues/170) - -### Bugfixes -- use `oldest-supported-numpy` to build cython extensions 
[#165](https://github.com/GeoStat-Framework/GSTools/pull/165) - - -## [1.3.0] - Pure Pink - 2021-04 - -### Topics - -#### Geographical Coordinates Support ([#113](https://github.com/GeoStat-Framework/GSTools/issues/113)) -- added boolean init parameter `latlon` to indicate a geographic model. When given, spatial dimension is fixed to `dim=3`, `anis` and `angles` will be ignored, since anisotropy is not well-defined on a sphere. -- add property `field_dim` to indicate the dimension of the resulting field. Will be 2 if `latlon=True` -- added yadrenko variogram, covariance and correlation method, since the geographic models are derived from standard models in 3D by plugging in the chordal distance of two points on a sphere derived from there great-circle distance `zeta`: - - `vario_yadrenko`: given by `variogram(2 * np.sin(zeta / 2))` - - `cov_yadrenko`: given by `covariance(2 * np.sin(zeta / 2))` - - `cor_yadrenko`: given by `correlation(2 * np.sin(zeta / 2))` -- added plotting routines for yadrenko methods described above -- the `isometrize` and `anisometrize` methods will convert `latlon` tuples (given in degree) to points on the unit-sphere in 3D and vice versa -- representation of geographical models don't display the `dim`, `anis` and `angles` parameters, but `latlon=True` -- `fit_variogram` will expect an estimated variogram with great-circle distances given in radians -- **Variogram estimation** - - `latlon` switch implemented in `estimate_vario` routine - - will return a variogram estimated by the great-circle distance (haversine formula) given in radians -- **Field** - - added plotting routines for latlon fields - - no vector fields possible on latlon fields - - corretly handle pos tuple for latlon fields - -#### Krige Unification ([#97](https://github.com/GeoStat-Framework/GSTools/issues/97)) -- Swiss Army Knife for kriging: The `Krige` class now provides everything in one place -- "Kriging the mean" is now possible with the switch `only_mean` in the call routine -- `Simple`/`Ordinary`/`Universal`/`ExtDrift`/`Detrended` are only shortcuts to `Krige` with limited input parameter list -- We now use the `covariance` function to build up the kriging matrix (instead of variogram) -- An `unbiased` switch was added to enable simple kriging (where the unbiased condition is not given) -- An `exact` switch was added to allow smother results, if a `nugget` is present in the model -- An `cond_err` parameter was added, where measurement error variances can be given for each conditional point -- pseudo-inverse matrix is now used to solve the kriging system (can be disabled by the new switch `pseudo_inv`), this is equal to solving the system with least-squares and prevents numerical errors -- added options `fit_normalizer` and `fit_variogram` to automatically fit normalizer and variogram to given data - -#### Directional Variograms and Auto-binning ([#87](https://github.com/GeoStat-Framework/GSTools/issues/87), [#106](https://github.com/GeoStat-Framework/GSTools/issues/106), [#131](https://github.com/GeoStat-Framework/GSTools/issues/131)) -- new routine name `vario_estimate` instead of `vario_estimate_unstructured` (old kept for legacy code) for simplicity -- new routine name `vario_estimate_axis` instead of `vario_estimate_structured` (old kept for legacy code) for simplicity -- **vario_estimate** - - added simple automatic binning routine to determine bins from given data (one third of box diameter as max bin distance, sturges rule for number of bins) - - allow to pass multiple fields for joint 
variogram estimation (e.g. for daily precipitation) on same mesh - - `no_data` option added to allow missing values - - **masked fields** - - user can now pass a masked array (or a list of masked arrays) to deselect data points. - - in addition, a `mask` keyword was added to provide an external mask - - **directional variograms** - - diretional variograms can now be estimated - - either provide a list of direction vectors or angles for directions (spherical coordinates) - - can be controlled by given angle tolerance and (optional) bandwidth - - prepared for nD - - structured fields (pos tuple describes axes) can now be passed to estimate an isotropic or directional variogram - - distance calculation in cython routines in now independent of dimension -- **vario_estimate_axis** - - estimation along array axis now possible in arbitrary dimensions - - `no_data` option added to allow missing values (sovles [#83](https://github.com/GeoStat-Framework/GSTools/issues/83)) - - axis can be given by name (`"x"`, `"y"`, `"z"`) or axis number (`0`, `1`, `2`, `3`, ...) - -#### Better Variogram fitting ([#78](https://github.com/GeoStat-Framework/GSTools/issues/78), [#145](https://github.com/GeoStat-Framework/GSTools/pull/145)) -- fixing sill possible now -- `loss` is now selectable for smoother handling of outliers -- r2 score can now be returned to get an impression of the goodness of fitting -- weights can be passed -- instead of deselecting parameters, one can also give fix values for each parameter -- default init guess for `len_scale` is now mean of given bin-centers -- default init guess for `var` and `nugget` is now mean of given variogram values - -#### CovModel update ([#109](https://github.com/GeoStat-Framework/GSTools/issues/109), [#122](https://github.com/GeoStat-Framework/GSTools/issues/122), [#157](https://github.com/GeoStat-Framework/GSTools/pull/157)) -- add new `rescale` argument and attribute to the `CovModel` class to be able to rescale the `len_scale` (usefull for unit conversion or rescaling `len_scale` to coincide with the `integral_scale` like it's the case with the Gaussian model) - See: [#90](https://github.com/GeoStat-Framework/GSTools/issues/90), [GeoStat-Framework/PyKrige#119](https://github.com/GeoStat-Framework/PyKrige/issues/119) -- added new `len_rescaled` attribute to the `CovModel` class, which is the rescaled `len_scale`: `len_rescaled = len_scale / rescale` -- new method `default_rescale` to provide default rescale factor (can be overridden) -- remove `doctest` calls -- docstring updates in `CovModel` and derived models -- updated all models to use the `cor` routine and make use of the `rescale` argument (See: [#90](https://github.com/GeoStat-Framework/GSTools/issues/90)) -- TPL models got a separate base class to not repeat code -- added **new models** (See: [#88](https://github.com/GeoStat-Framework/GSTools/issues/88)): - - `HyperSpherical`: (Replaces the old `Intersection` model) Derived from the intersection of hyper-spheres in arbitrary dimensions. Coincides with the linear model in 1D, the circular model in 2D and the classical spherical model in 3D - - `SuperSpherical`: like the HyperSpherical, but the shape parameter derived from dimension can be set by the user. Coincides with the HyperSpherical model by default - - `JBessel`: a hole model valid in all dimensions. The shape parameter controls the dimension it was derived from. For `nu=0.5` this model coincides with the well known `wave` hole model. 
- - `TPLSimple`: a simple truncated power law controlled by a shape parameter `nu`. Coincides with the truncated linear model for `nu=1` - - `Cubic`: to be compatible with scikit-gstat in the future -- all arguments are now stored as float internally ([#157](https://github.com/GeoStat-Framework/GSTools/pull/157)) -- string representation of the `CovModel` class is now using a float precision (`CovModel._prec=3`) to truncate longish output -- string representation of the `CovModel` class now only shows `anis` and `angles` if model is anisotropic resp. rotated -- dimension validity check: raise a warning, if given model is not valid in the desired dimension (See: [#86](https://github.com/GeoStat-Framework/GSTools/issues/86)) - -#### Normalizer, Trend and Mean ([#124](https://github.com/GeoStat-Framework/GSTools/issues/124)) - -- new `normalize` submodule containing power-transforms for data to gain normality -- Base-Class: `Normalizer` providing basic functionality including maximum likelihood fitting -- added: `LogNormal`, `BoxCox`, `BoxCoxShift`, `YeoJohnson`, `Modulus` and `Manly` -- normalizer, trend and mean can be passed to SRF, Krige and variogram estimation routines - - A trend can be a callable function, that represents a trend in input data. For example a linear decrease of temperature with height. - - The normalizer will be applied after the data was detrended, i.e. the trend was substracted from the data, in order to gain normality. - - The mean is now interpreted as the mean of the normalized data. The user could also provide a callable mean, but it is mostly meant to be constant. - -#### Arbitrary dimensions ([#112](https://github.com/GeoStat-Framework/GSTools/issues/112)) -- allow arbitrary dimensions in all routines (CovModel, Krige, SRF, variogram) -- anisotropy and rotation following a generalization of tait-bryan angles -- `CovModel` provides `isometrize` and `anisometrize` routines to convert points - -#### New Class for Conditioned Random Fields ([#130](https://github.com/GeoStat-Framework/GSTools/issues/130)) -- **THIS BREAKS BACKWARD COMPATIBILITY** -- `CondSRF` replaces the conditioning feature of the SRF class, which was cumbersome and limited to Ordinary and Simple kriging -- `CondSRF` behaves similar to the `SRF` class, but instead of a covariance model, it takes a kriging class as input. With this kriging class, all conditioning related settings are defined. - -### Enhancements -- Python 3.9 Support [#107](https://github.com/GeoStat-Framework/GSTools/issues/107) -- add routines to format struct. pos tuple by given `dim` or `shape` -- add routine to format struct. 
pos tuple by given `shape` (variogram helper) -- remove `field.tools` subpackage -- support `meshio>=4.0` and add as dependency -- PyVista mesh support [#59](https://github.com/GeoStat-Framework/GSTools/issues/59) -- added `EARTH_RADIUS` as constant providing earths radius in km (can be used to rescale models) -- add routines `latlon2pos` and `pos2latlon` to convert lat-lon coordinates to points on unit-sphere and vice versa -- a lot of new examples and tutorials -- `RandMeth` class got a switch to select the sampling strategy -- plotter for n-D fields added [#141](https://github.com/GeoStat-Framework/GSTools/issues/141) -- antialias for contour plots of 2D fields [#141](https://github.com/GeoStat-Framework/GSTools/issues/141) -- building from source is now configured with `pyproject.toml` to care about build dependencies, see [#154](https://github.com/GeoStat-Framework/GSTools/issues/154) +First release of GSTools-Cython ### Changes -- drop support for Python 3.5 [#146](https://github.com/GeoStat-Framework/GSTools/pull/146) -- added a finit limit for shape-parameters in some `CovModel`s [#147](https://github.com/GeoStat-Framework/GSTools/pull/147) -- drop usage of `pos2xyz` and `xyz2pos` -- remove structured option from generators (structured pos need to be converted first) -- explicitly assert dim=2,3 when generating vector fields -- simplify `pre_pos` routine to save pos tuple and reformat it an unstructured tuple -- simplify field shaping -- simplify plotting routines -- only the `"unstructured"` keyword is recognized everywhere, everything else is interpreted as `"structured"` (e.g. `"rectilinear"`) -- use GitHub-Actions instead of TravisCI -- parallel build now controlled by env-var `GSTOOLS_BUILD_PARALLEL=1`, see [#154](https://github.com/GeoStat-Framework/GSTools/issues/154) -- install extra target for `[dev]` dropped, can be reproduced by `pip install gstools[test, doc]`, see [#154](https://github.com/GeoStat-Framework/GSTools/issues/154) - -### Bugfixes -- typo in keyword argument for vario_estimate_structured [#80](https://github.com/GeoStat-Framework/GSTools/issues/80) -- isotropic rotation of SRF was not possible [#100](https://github.com/GeoStat-Framework/GSTools/issues/100) -- `CovModel.opt_arg` now sorted [#103](https://github.com/GeoStat-Framework/GSTools/issues/103) -- `CovModel.fit`: check if weights are given as a string (numpy comparison error) [#111](https://github.com/GeoStat-Framework/GSTools/issues/111) -- several pylint fixes ([#159](https://github.com/GeoStat-Framework/GSTools/pull/159)) - - -## [1.2.1] - Volatile Violet - 2020-04-14 - -### Bugfixes -- `ModuleNotFoundError` is not present in py35 -- Fixing Cressie-Bug #76 -- Adding analytical formula for integral scales of rational and stable model -- remove prange from IncomprRandMeth summators to prevent errors on Win and macOS - - -## [1.2.0] - Volatile Violet - 2020-03-20 - -### Enhancements -- different variogram estimator functions can now be used #51 -- the TPLGaussian and TPLExponential now have analytical spectra #67 -- added property `is_isotropic` to `CovModel` #67 -- reworked the whole krige sub-module to provide multiple kriging methods #67 - - Simple - - Ordinary - - Universal - - External Drift Kriging - - Detrended Kriging -- a new transformation function for discrete fields has been added #70 -- reworked tutorial section in the documentation #63 -- pyvista interface #29 - -### Changes -- Python versions 2.7 and 3.4 are no longer supported #40 #43 -- `CovModel`: in 3D the input of anisotropy is 
now treated slightly different: #67 - - single given anisotropy value [e] is converted to [1, e] (it was [e, e] before) - - two given length-scales [l_1, l_2] are converted to [l_1, l_2, l_2] (it was [l_1, l_2, l_1] before) - -### Bugfixes -- a race condition in the structured variogram estimation has been fixed #51 - - -## [1.1.1] - Reverberating Red - 2019-11-08 - -### Enhancements -- added a changelog. See: [commit fbea883](https://github.com/GeoStat-Framework/GSTools/commit/fbea88300d0862393e52f4b7c3d2b15c2039498b) - -### Changes -- deprecation warnings are now printed if Python versions 2.7 or 3.4 are used #40 #41 - -### Bugfixes -- define spectral_density instead of spectrum in covariance models since Cov-base derives spectrum. See: [commit 00f2747](https://github.com/GeoStat-Framework/GSTools/commit/00f2747fd0503ff8806f2eebfba36acff813416b) -- better boundaries for `CovModel` parameters. See: https://github.com/GeoStat-Framework/GSTools/issues/37 - - -## [1.1.0] - Reverberating Red - 2019-10-01 - -### Enhancements -- by using Cython for all the heavy computations, we could achieve quite some speed ups and reduce the memory consumption significantly #16 -- parallel computation in Cython is now supported with the help of OpenMP and the performance increase is nearly linear with increasing cores #16 -- new submodule `krige` providing simple (known mean) and ordinary (estimated mean) kriging working analogous to the srf class -- interface to pykrige to use the gstools `CovModel` with the pykrige routines (https://github.com/bsmurphy/PyKrige/issues/124) -- the srf class now provides a `plot` and a `vtk_export` routine -- incompressible flow fields can now be generated #14 -- new submodule providing several field transformations like: Zinn&Harvey, log-normal, bimodal, ... 
#13 -- Python 3.4 and 3.7 wheel support #19 -- field can now be generated directly on meshes from [meshio](https://github.com/nschloe/meshio) and [ogs5py](https://github.com/GeoStat-Framework/ogs5py), see: [commit f4a3439](https://github.com/GeoStat-Framework/GSTools/commit/f4a3439400b81d8d9db81a5f7fbf6435f603cf05) -- the srf and kriging classes now store the last `pos`, `mesh_type` and `field` values to keep them accessible, see: [commit 29f7f1b](https://github.com/GeoStat-Framework/GSTools/commit/29f7f1b029866379ce881f44765f72534d757fae) -- tutorials on all important features of GSTools have been written for you guys #20 -- a new interface to pyvista is provided to export fields to python vtk representation, which can be used for plotting, exploring and exporting fields #29 - -### Changes -- the license was changed from GPL to LGPL in order to promote the use of this library #25 -- the rotation angles are now interpreted in positive direction (counter clock wise) -- the `force_moments` keyword was removed from the SRF call method, it is now in provided as a field transformation #13 -- drop support of python implementations of the variogram estimators #18 -- the `variogram_normed` method was removed from the `CovModel` class due to redundance [commit 25b1647](https://github.com/GeoStat-Framework/GSTools/commit/25b164722ac6744ebc7e03f3c0bf1c30be1eba89) -- the position vector of 1D fields does not have to be provided in a list-like object with length 1 [commit a6f5be8](https://github.com/GeoStat-Framework/GSTools/commit/a6f5be8bfd2db1f002e7889ecb8e9a037ea08886) - -### Bugfixes -- several minor bugfixes - - -## [1.0.1] - Bouncy Blue - 2019-01-18 - -### Bugfixes -- fixed Numpy and Cython version during build process - - -## [1.0.0] - Bouncy Blue - 2019-01-16 - -### Enhancements -- added a new covariance class, which allows the easy usage of arbitrary covariance models -- added many predefined covariance models, including truncated power law models -- added [tutorials](https://geostat-framework.readthedocs.io/projects/gstools/en/latest/tutorials.html) and examples, showing and explaining the main features of GSTools -- variogram models can be fitted to data -- prebuilt binaries for many Linux distributions, Mac OS and Windows, making the installation, especially of the Cython code, much easier -- the generated fields can now easily be exported to vtk files -- variance scaling is supported for coarser grids -- added pure Python versions of the variogram estimators, in case somebody has problems compiling Cython code -- the [documentation](https://geostat-framework.readthedocs.io/projects/gstools/en/latest/) is now a lot cleaner and easier to use -- the code is a lot cleaner and more consistent now -- unit tests are now automatically tested when new code is pushed -- test coverage of code is shown -- GeoStat Framework now has a website, visit us: https://geostat-framework.github.io/ - -### Changes -- release is not downwards compatible with release v0.4.0 -- SRF creation has been adapted for the `CovModel` -- a tuple `pos` is now used instead of `x`, `y`, and `z` for the axes -- renamed `estimate_unstructured` and `estimate_structured` to `vario_estimate_unstructured` and `vario_estimate_structured` for less ambiguity - -### Bugfixes -- several minor bugfixes - - -## [0.4.0] - Glorious Green - 2018-07-17 - -### Bugfixes -- import of cython functions put into a try-block - - -## [0.3.6] - Original Orange - 2018-07-17 - -First release of GSTools. 
+- moved Cython files into this separate package -[Unreleased]: https://github.com/GeoStat-Framework/gstools/compare/v1.5.2...HEAD -[1.5.2]: https://github.com/GeoStat-Framework/gstools/compare/v1.5.1...v1.5.2 -[1.5.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.5.0...v1.5.1 -[1.5.0]: https://github.com/GeoStat-Framework/gstools/compare/v1.4.1...v1.5.0 -[1.4.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.4.0...v1.4.1 -[1.4.0]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.5...v1.4.0 -[1.3.5]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.4...v1.3.5 -[1.3.4]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.3...v1.3.4 -[1.3.3]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.2...v1.3.3 -[1.3.2]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.1...v1.3.2 -[1.3.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.3.0...v1.3.1 -[1.3.0]: https://github.com/GeoStat-Framework/gstools/compare/v1.2.1...v1.3.0 -[1.2.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.2.0...v1.2.1 -[1.2.0]: https://github.com/GeoStat-Framework/gstools/compare/v1.1.1...v1.2.0 -[1.1.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.1.0...v1.1.1 -[1.1.0]: https://github.com/GeoStat-Framework/gstools/compare/v1.0.1...v1.1.0 -[1.0.1]: https://github.com/GeoStat-Framework/gstools/compare/v1.0.0...v1.0.1 -[1.0.0]: https://github.com/GeoStat-Framework/gstools/compare/0.4.0...v1.0.0 -[0.4.0]: https://github.com/GeoStat-Framework/gstools/compare/0.3.6...0.4.0 -[0.3.6]: https://github.com/GeoStat-Framework/gstools/releases/tag/0.3.6 +[Unreleased]: https://github.com/GeoStat-Framework/gstools-cython/compare/v1.0.0...HEAD +[1.0.0]: https://github.com/GeoStat-Framework/gstools-cython/releases/tag/v1.0.0 diff --git a/MANIFEST.in b/MANIFEST.in index 24184482..5778d3fa 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ prune ** recursive-include tests *.py -recursive-include src/gstools *.py *.pyx +recursive-include src/gstools_cython *.py *.pyx include AUTHORS.md LICENSE README.md pyproject.toml setup.py diff --git a/README.md b/README.md index 6cb69901..9ca6e7c0 100644 --- a/README.md +++ b/README.md @@ -93,270 +93,14 @@ If you want to cite a specific version, have a look at the [Zenodo site](https:/ You can find the documentation under [geostat-framework.readthedocs.io][doc_link]. -### Tutorials and Examples +## Cython backend -The documentation also includes some [tutorials][tut_link], showing the most important use cases of GSTools, which are +This package is the cython backend implementation for GSTools. -- [Random Field Generation][tut1_link] -- [The Covariance Model][tut2_link] -- [Variogram Estimation][tut3_link] -- [Random Vector Field Generation][tut4_link] -- [Kriging][tut5_link] -- [Conditioned random field generation][tut6_link] -- [Field transformations][tut7_link] -- [Geographic Coordinates][tut8_link] -- [Spatio-Temporal Modelling][tut9_link] -- [Normalizing Data][tut10_link] -- [Miscellaneous examples][tut0_link] -The associated python scripts are provided in the `examples` folder. - - -## Spatial Random Field Generation - -The core of this library is the generation of spatial random fields. These fields are generated using the randomisation method, described by [Heße et al. 2014][rand_link]. 
- -[rand_link]: https://doi.org/10.1016/j.envsoft.2014.01.013 - - -### Examples - -#### Gaussian Covariance Model - -This is an example of how to generate a 2 dimensional spatial random field with a gaussian covariance model. - -```python -import gstools as gs -# structured field with a size 100x100 and a grid-size of 1x1 -x = y = range(100) -model = gs.Gaussian(dim=2, var=1, len_scale=10) -srf = gs.SRF(model) -srf((x, y), mesh_type='structured') -srf.plot() -``` -

-[figure: Random field]
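The randomisation method referenced above boils down to a superposition of cosine and sine modes with random amplitudes, where the mode frequencies are drawn from the spectral density of the covariance model; that summation is the heavy loop the Cython `summator` (later `field`) module in this package takes over. The following is a minimal NumPy sketch of the idea (editor's illustration, assuming a plain `exp(-(r/l)**2)` covariance in 1D rather than the exact GSTools parametrisation):

```python
import numpy as np

rng = np.random.default_rng(20170519)
n_modes = 1000                 # number of random modes in the superposition
len_scale = 10.0               # length scale l of the assumed exp(-(r/l)**2) covariance
x = np.linspace(0, 100, 101)   # 1D evaluation points

# for C(r) = exp(-(r/l)**2) the normalised spectral density is Gaussian,
# so the mode frequencies can be sampled from a normal distribution
k = rng.normal(scale=np.sqrt(2.0) / len_scale, size=n_modes)
z1, z2 = rng.normal(size=(2, n_modes))

# randomisation method: field(x) = sqrt(var/N) * sum_i [z1_i*cos(k_i*x) + z2_i*sin(k_i*x)]
field = np.sqrt(1.0 / n_modes) * (
    z1 @ np.cos(np.outer(k, x)) + z2 @ np.sin(np.outer(k, x))
)
```

In GSTools itself this superposition is evaluated by the compiled Cython routine, which is what keeps field generation fast for large grids and many modes.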

- -GSTools also provides support for [geographic coordinates](https://en.wikipedia.org/wiki/Geographic_coordinate_system). -This works perfectly well with [cartopy](https://scitools.org.uk/cartopy/docs/latest/index.html). - -```python -import matplotlib.pyplot as plt -import cartopy.crs as ccrs -import gstools as gs -# define a structured field by latitude and longitude -lat = lon = range(-80, 81) -model = gs.Gaussian(latlon=True, len_scale=777, geo_scale=gs.KM_SCALE) -srf = gs.SRF(model, seed=12345) -field = srf.structured((lat, lon)) -# Orthographic plotting with cartopy -ax = plt.subplot(projection=ccrs.Orthographic(-45, 45)) -cont = ax.contourf(lon, lat, field, transform=ccrs.PlateCarree()) -ax.coastlines() -ax.set_global() -plt.colorbar(cont) -``` - -

-[figure: lat-lon random field]

- -A similar example but for a three dimensional field is exported to a [VTK](https://vtk.org/) file, which can be visualized with [ParaView](https://www.paraview.org/) or [PyVista](https://docs.pyvista.org) in Python: - -```python -import gstools as gs -# structured field with a size 100x100x100 and a grid-size of 1x1x1 -x = y = z = range(100) -model = gs.Gaussian(dim=3, len_scale=[16, 8, 4], angles=(0.8, 0.4, 0.2)) -srf = gs.SRF(model) -srf((x, y, z), mesh_type='structured') -srf.vtk_export('3d_field') # Save to a VTK file for ParaView - -mesh = srf.to_pyvista() # Create a PyVista mesh for plotting in Python -mesh.contour(isosurfaces=8).plot() -``` - -

-[figure: 3d Random field]

- - -## Estimating and Fitting Variograms - -The spatial structure of a field can be analyzed with the variogram, which contains the same information as the covariance function. - -All covariance models can be used to fit given variogram data by a simple interface. - -### Example - -This is an example of how to estimate the variogram of a 2 dimensional unstructured field and estimate the parameters of the covariance -model again. - -```python -import numpy as np -import gstools as gs -# generate a synthetic field with an exponential model -x = np.random.RandomState(19970221).rand(1000) * 100. -y = np.random.RandomState(20011012).rand(1000) * 100. -model = gs.Exponential(dim=2, var=2, len_scale=8) -srf = gs.SRF(model, mean=0, seed=19970221) -field = srf((x, y)) -# estimate the variogram of the field -bin_center, gamma = gs.vario_estimate((x, y), field) -# fit the variogram with a stable model. (no nugget fitted) -fit_model = gs.Stable(dim=2) -fit_model.fit_variogram(bin_center, gamma, nugget=False) -# output -ax = fit_model.plot(x_max=max(bin_center)) -ax.scatter(bin_center, gamma) -print(fit_model) -``` - -Which gives: - -```python -Stable(dim=2, var=1.85, len_scale=7.42, nugget=0.0, anis=[1.0], angles=[0.0], alpha=1.09) -``` - -

-[figure: Variogram]
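For reference, the default estimator behind `vario_estimate` is the classical Matheron semi-variogram, gamma(r) = 1/(2 N(r)) * sum of (z_i - z_j)^2 over all point pairs whose separation falls into the bin around r. A rough pure-NumPy sketch of that pair loop follows (editor's illustration with a hypothetical helper name; the compiled Cython `estimator` (later `variogram`) module in this package does essentially this, only much faster and with masking, directions and further options):

```python
import numpy as np

def naive_vario_estimate(pos, field, bin_edges):
    """Matheron estimator: gamma = 1/(2*N) * sum of squared increments per distance bin."""
    pos = np.atleast_2d(np.asarray(pos, dtype=float))    # shape (dim, n)
    field = np.asarray(field, dtype=float)               # shape (n,)
    diff = pos[:, :, None] - pos[:, None, :]
    dist = np.sqrt((diff**2).sum(axis=0))                # pairwise distances
    sq_inc = (field[:, None] - field[None, :]) ** 2      # squared field increments
    iu = np.triu_indices(field.size, k=1)                # count each pair only once
    dist, sq_inc = dist[iu], sq_inc[iu]
    gamma = np.zeros(len(bin_edges) - 1)
    for i, (lo, hi) in enumerate(zip(bin_edges[:-1], bin_edges[1:])):
        pairs = (dist >= lo) & (dist < hi)
        if pairs.any():
            gamma[i] = 0.5 * sq_inc[pairs].mean()
    return gamma
```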

- - -## Kriging and Conditioned Random Fields - -An important part of geostatistics is Kriging and conditioning spatial random -fields to measurements. With conditioned random fields, an ensemble of field realizations with their variability depending on the proximity of the measurements can be generated. - -### Example -For better visualization, we will condition a 1d field to a few "measurements", generate 100 realizations and plot them: - -```python -import numpy as np -import matplotlib.pyplot as plt -import gstools as gs - -# conditions -cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] -cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] - -# conditioned spatial random field class -model = gs.Gaussian(dim=1, var=0.5, len_scale=2) -krige = gs.krige.Ordinary(model, cond_pos, cond_val) -cond_srf = gs.CondSRF(krige) -# same output positions for all ensemble members -grid_pos = np.linspace(0.0, 15.0, 151) -cond_srf.set_pos(grid_pos) - -# seeded ensemble generation -seed = gs.random.MasterRNG(20170519) -for i in range(100): - field = cond_srf(seed=seed(), store=f"field_{i}") - plt.plot(grid_pos, field, color="k", alpha=0.1) -plt.scatter(cond_pos, cond_val, color="k") -plt.show() -``` - -

-[figure: Conditioned]
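Stripped of the class machinery, a simple-kriging prediction is one linear solve against the covariance matrix of the conditioning points followed by a weighted sum towards every target point; that weighted-summation part is roughly what the Cython kriging kernel (`krigesum`, later `krige`) in this package accelerates. A hedged, self-contained NumPy sketch (1D positions, known mean, covariance function passed in; the example above uses ordinary kriging, which additionally estimates the mean):

```python
import numpy as np

def simple_krige(cov, cond_pos, cond_val, pos, mean=0.0):
    """Simple kriging: z*(x) = mean + k(x)^T K^{-1} (z - mean)."""
    cond_pos = np.asarray(cond_pos, dtype=float)
    cond_val = np.asarray(cond_val, dtype=float)
    pos = np.asarray(pos, dtype=float)
    K = cov(np.abs(cond_pos[:, None] - cond_pos[None, :]))  # cond-cond covariances
    k = cov(np.abs(pos[:, None] - cond_pos[None, :]))       # target-cond covariances
    weights = np.linalg.solve(K, cond_val - mean)            # one solve for the data part
    return mean + k @ weights                                 # weighted sum per target

# usage with a Gaussian-shaped covariance and the conditioning data from above
cov = lambda r: 0.5 * np.exp(-((r / 2.0) ** 2))
grid = np.linspace(0.0, 15.0, 151)
estimate = simple_krige(cov, [0.3, 1.9, 1.1, 3.3, 4.7], [0.47, 0.56, 0.74, 1.47, 1.74], grid)
```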

- -## User Defined Covariance Models - -One of the core-features of GSTools is the powerful -[CovModel][cov_link] -class, which allows to easy define covariance models by the user. - -### Example - -Here we re-implement the Gaussian covariance model by defining just a -[correlation][cor_link] function, which takes a non-dimensional distance ``h = r/l``: - -```python -import numpy as np -import gstools as gs -# use CovModel as the base-class -class Gau(gs.CovModel): - def cor(self, h): - return np.exp(-h**2) -``` - -And that's it! With ``Gau`` you now have a fully working covariance model, -which you could use for field generation or variogram fitting as shown above. - -Have a look at the [documentation ][doc_link] for further information on incorporating -optional parameters and optimizations. - - -## Incompressible Vector Field Generation - -Using the original [Kraichnan method][kraichnan_link], incompressible random -spatial vector fields can be generated. - - -### Example - -```python -import numpy as np -import gstools as gs -x = np.arange(100) -y = np.arange(100) -model = gs.Gaussian(dim=2, var=1, len_scale=10) -srf = gs.SRF(model, generator='VectorField', seed=19841203) -srf((x, y), mesh_type='structured') -srf.plot() -``` - -yielding - -

-[figure: vector field]
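The incompressibility in this Kraichnan-type generator comes from a projector applied to each random mode: the mode amplitude is multiplied by P(k) = I - k k^T / |k|^2, which removes the component parallel to the wave vector k, so every mode (and therefore the whole superposition) is divergence-free. A tiny check of that property (editor's sketch, not the actual GSTools code path):

```python
import numpy as np

k = np.array([1.0, 2.0])                    # wave vector of one random mode
e1 = np.array([1.0, 0.0])                   # direction of the mean flow
P = np.eye(2) - np.outer(k, k) / (k @ k)    # incompressibility projector
amp = P @ e1                                # projected mode amplitude
print(amp @ k)                              # ~0, so div(amp * sin(k.x)) vanishes
```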

- - -[kraichnan_link]: https://doi.org/10.1063/1.1692799 - - -## VTK/PyVista Export - -After you have created a field, you may want to save it to file, so we provide -a handy [VTK][vtk_link] export routine using the `.vtk_export()` or you could -create a VTK/PyVista dataset for use in Python with to `.to_pyvista()` method: - -```python -import gstools as gs -x = y = range(100) -model = gs.Gaussian(dim=2, var=1, len_scale=10) -srf = gs.SRF(model) -srf((x, y), mesh_type='structured') -srf.vtk_export("field") # Saves to a VTK file -mesh = srf.to_pyvista() # Create a VTK/PyVista dataset in memory -mesh.plot() -``` - -Which gives a RectilinearGrid VTK file ``field.vtr`` or creates a PyVista mesh -in memory for immediate 3D plotting in Python. - -

-[figure: pyvista export]
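As a quick follow-up to the export: the written RectilinearGrid file can be loaded again later, for example with PyVista's generic reader (a small, hedged snippet assuming ``field.vtr`` from the example above exists in the working directory):

```python
import pyvista as pv

mesh = pv.read("field.vtr")   # the reader is chosen from the file extension
print(mesh.n_points, mesh.array_names)
mesh.plot()
```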

- - -## Requirements: +## Requirements - [NumPy >= 1.20.0](https://www.numpy.org) -- [SciPy >= 1.1.0](https://www.scipy.org/scipylib) -- [hankel >= 1.0.0](https://github.com/steven-murray/hankel) -- [emcee >= 3.0.0](https://github.com/dfm/emcee) -- [pyevtk >= 1.1.1](https://github.com/pyscience-projects/pyevtk) -- [meshio >= 5.1.0](https://github.com/nschloe/meshio) - -### Optional - -- [GSTools-Core >= 0.2.0](https://github.com/GeoStat-Framework/GSTools-Core) -- [matplotlib](https://matplotlib.org) -- [pyvista](https://docs.pyvista.org/) ## Contact @@ -368,28 +112,4 @@ You can contact us via . [LGPLv3][license_link] © 2018-2024 -[pip_link]: https://pypi.org/project/gstools -[conda_link]: https://docs.conda.io/en/latest/miniconda.html -[conda_forge_link]: https://github.com/conda-forge/gstools-feedstock#installing-gstools -[conda_pip]: https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-pkgs.html#installing-non-conda-packages -[pipiflag]: https://pip-python3.readthedocs.io/en/latest/reference/pip_install.html?highlight=i#cmdoption-i -[winpy_link]: https://winpython.github.io/ -[license_link]: https://github.com/GeoStat-Framework/GSTools/blob/main/LICENSE -[cov_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/generated/gstools.covmodel.CovModel.html#gstools.covmodel.CovModel -[stable_link]: https://en.wikipedia.org/wiki/Stable_distribution -[doc_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/ -[doc_install_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/#pip -[tut_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/tutorials.html -[tut1_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/01_random_field/index.html -[tut2_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/02_cov_model/index.html -[tut3_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/03_variogram/index.html -[tut4_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/04_vector_field/index.html -[tut5_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/05_kriging/index.html -[tut6_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/06_conditioned_fields/index.html -[tut7_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/07_transformations/index.html -[tut8_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/08_geo_coordinates/index.html -[tut9_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/09_spatio_temporal/index.html -[tut10_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/10_normalizer/index.html -[tut0_link]: https://geostat-framework.readthedocs.io/projects/gstools/en/stable/examples/00_misc/index.html -[cor_link]: https://en.wikipedia.org/wiki/Autocovariance#Normalization -[vtk_link]: https://www.vtk.org/ +[license_link]: https://github.com/GeoStat-Framework/GSTools-Cython/blob/main/LICENSE diff --git a/docs/source/api.rst b/docs/source/api.rst index fe12233b..8364cf37 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -1,8 +1,8 @@ -=========== -GSTools API -=========== +================== +GSTools-Cython API +================== -.. automodule:: gstools +.. automodule:: gstools_cython .. 
raw:: latex diff --git a/docs/source/conf.py b/docs/source/conf.py index e89928fc..17eef7b9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,7 +33,7 @@ # local module should not be added to sys path if it's installed on RTFD # see: https://stackoverflow.com/a/31882049/6696397 # sys.path.insert(0, os.path.abspath("../../")) -from gstools import __version__ as ver +from gstools_cython import __version__ as ver def skip(app, what, name, obj, skip, options): @@ -109,7 +109,7 @@ def setup(app): # General information about the project. curr_year = datetime.datetime.now().year -project = "GSTools" +project = "GSTools-Cython" copyright = f"2018 - {curr_year}, Sebastian Müller, Lennart Schüler" author = "Sebastian Müller, Lennart Schüler" @@ -322,7 +322,7 @@ def setup(app): # directory where function granular galleries are stored "backreferences_dir": None, # Modules for which function level galleries are created. In - "doc_module": "gstools", + "doc_module": "gstools_cython", # "first_notebook_cell": ( # "%matplotlib inline\n" # "from pyvista import set_plot_theme\n" diff --git a/pyproject.toml b/pyproject.toml index cd0dc6ed..5d244eb2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ requires = [ "setuptools>=64", "setuptools_scm>=7", - "numpy>=2.0.0rc1,<2.3; python_version >= '3.9'", + "numpy>=2.0.0rc1; python_version >= '3.9'", "oldest-supported-numpy; python_version < '3.9'", "Cython>=3.0.10,<3.1.0", "extension-helpers>=1", @@ -11,8 +11,8 @@ build-backend = "setuptools.build_meta" [project] requires-python = ">=3.8" -name = "gstools" -description = "GSTools: A geostatistical toolbox." +name = "gstools_cython" +description = "Cython backend for GSTools." authors = [ {name = "Sebastian Müller, Lennart Schüler", email = "info@geostat-framework.org"}, ] @@ -46,12 +46,7 @@ classifiers = [ "Topic :: Utilities", ] dependencies = [ - "emcee>=3.0.0", - "hankel>=1.0.0", - "meshio>=5.1.0", "numpy>=1.20.0", - "pyevtk>=1.1.1", - "scipy>=1.1.0", ] [project.optional-dependencies] @@ -67,11 +62,6 @@ doc = [ "sphinx-rtd-theme>=2", "sphinxcontrib-youtube>=1.1", ] -plotting = [ - "matplotlib>=3.7", - "pyvista>=0.40", -] -rust = ["gstools_core>=0.2.0,<1"] test = ["pytest-cov>=3"] lint = [ "black>=24", @@ -81,18 +71,18 @@ lint = [ ] [project.urls] -Changelog = "https://github.com/GeoStat-Framework/GSTools/blob/main/CHANGELOG.md" +Changelog = "https://github.com/GeoStat-Framework/GSTools-Cython/blob/main/CHANGELOG.md" Conda-Forge = "https://anaconda.org/conda-forge/gstools" Documentation = "https://gstools.readthedocs.io" Homepage = "https://geostat-framework.org/#gstools" -Source = "https://github.com/GeoStat-Framework/GSTools" -Tracker = "https://github.com/GeoStat-Framework/GSTools/issues" +Source = "https://github.com/GeoStat-Framework/GSTools-Cython" +Tracker = "https://github.com/GeoStat-Framework/GSTools-Cython/issues" [tool.setuptools] license-files = ["LICENSE"] [tool.setuptools_scm] -write_to = "src/gstools/_version.py" +write_to = "src/gstools_cython/_version.py" write_to_template = "__version__ = '{version}'" local_scheme = "no-local-version" fallback_version = "0.0.0.dev0" @@ -114,13 +104,11 @@ target-version = [ [tool.coverage] [tool.coverage.run] - source = ["gstools"] + source = ["gstools_cython"] omit = [ "*docs*", "*examples*", "*tests*", - "*/src/gstools/covmodel/plot.py", - "*/src/gstools/field/plot.py", ] [tool.coverage.report] diff --git a/setup.py b/setup.py index b27548a9..7368f686 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -"""GSTools: A 
geostatistical toolbox.""" +"""GSTools-Cython: Cython backend for GSTools.""" import os @@ -10,12 +10,12 @@ # cython extensions CY_MODULES = [ Extension( - name=f"gstools.{ext}", - sources=[os.path.join("src", "gstools", *ext.split(".")) + ".pyx"], + name=f"gstools_cython.{ext}", + sources=[os.path.join("src", "gstools_cython", ext) + ".pyx"], include_dirs=[np.get_include()], define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")], ) - for ext in ["field.summator", "variogram.estimator", "krige.krigesum"] + for ext in ["summator", "estimator", "krigesum"] ] # you can set GSTOOLS_BUILD_PARALLEL=0 or GSTOOLS_BUILD_PARALLEL=1 open_mp = False @@ -23,9 +23,9 @@ added = [add_openmp_flags_if_available(mod) for mod in CY_MODULES] if any(added): open_mp = True - print(f"## GSTools setup: OpenMP used: {open_mp}") + print(f"## GSTools-Cython setup: OpenMP used: {open_mp}") else: - print("## GSTools setup: OpenMP not wanted by the user.") + print("## GSTools-Cython setup: OpenMP not wanted by the user.") # setup - do not include package data to ignore .pyx files in wheels setup( diff --git a/src/gstools_cython/__init__.py b/src/gstools_cython/__init__.py index 11e63a2b..6c332893 100644 --- a/src/gstools_cython/__init__.py +++ b/src/gstools_cython/__init__.py @@ -7,7 +7,7 @@ kriging and variogram estimation based on a list of provided or even user-defined covariance models. -The following functionalities are directly provided on module-level. +This package provides the Cython backend implementations for GSTools. Subpackages =========== @@ -15,231 +15,19 @@ .. autosummary:: :toctree: api - covmodel - field - variogram - krige - random - tools - transform - normalizer - -Classes -======= - -Kriging -^^^^^^^ -Swiss-Army-Knife for Kriging. For short cut classes see: :any:`gstools.krige` - -.. currentmodule:: gstools.krige - -.. autosummary:: - Krige - -Spatial Random Field -^^^^^^^^^^^^^^^^^^^^ -Classes for (conditioned) random field generation - -.. currentmodule:: gstools.field - -.. autosummary:: - SRF - CondSRF - -Covariance Base-Class -^^^^^^^^^^^^^^^^^^^^^ -Class to construct user defined covariance models - -.. currentmodule:: gstools.covmodel - -.. autosummary:: - CovModel - -Covariance Models -^^^^^^^^^^^^^^^^^ - -Standard Covariance Models -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. autosummary:: - Gaussian - Exponential - Matern - Integral - Stable - Rational - Cubic - Linear - Circular - Spherical - HyperSpherical - SuperSpherical - JBessel - -Truncated Power Law Covariance Models -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. autosummary:: - TPLGaussian - TPLExponential - TPLStable - TPLSimple - -Functions -========= - -VTK-Export -^^^^^^^^^^ -Routines to export fields to the vtk format - -.. currentmodule:: gstools.tools - -.. autosummary:: - vtk_export - to_vtk - -Geometric -^^^^^^^^^ -Some convenient functions for geometric operations - -.. autosummary:: - rotated_main_axes - generate_grid - generate_st_grid - -Variogram Estimation -^^^^^^^^^^^^^^^^^^^^ -Estimate the variogram of a given field with these routines - -.. currentmodule:: gstools.variogram - -.. autosummary:: - vario_estimate - vario_estimate_axis - standard_bins - -Misc -==== - -.. currentmodule:: gstools.tools - -.. autosummary:: - EARTH_RADIUS - KM_SCALE - DEGREE_SCALE - RADIAN_SCALE + estimator + krigesum + summator """ # Hooray! 
-from gstools import ( - config, - covmodel, - field, - krige, - normalizer, - random, - tools, - transform, - variogram, -) -from gstools.covmodel import ( - Circular, - CovModel, - Cubic, - Exponential, - Gaussian, - HyperSpherical, - Integral, - JBessel, - Linear, - Matern, - Rational, - Spherical, - Stable, - SuperSpherical, - TPLExponential, - TPLGaussian, - TPLSimple, - TPLStable, -) -from gstools.field import SRF, CondSRF -from gstools.krige import Krige -from gstools.tools import ( - DEGREE_SCALE, - EARTH_RADIUS, - KM_SCALE, - RADIAN_SCALE, - generate_grid, - generate_st_grid, - rotated_main_axes, - to_vtk, - to_vtk_structured, - to_vtk_unstructured, - vtk_export, - vtk_export_structured, - vtk_export_unstructured, -) -from gstools.variogram import ( - standard_bins, - vario_estimate, - vario_estimate_axis, - vario_estimate_structured, - vario_estimate_unstructured, -) +from . import estimator, krigesum, summator try: - from gstools._version import __version__ + from ._version import __version__ except ModuleNotFoundError: # pragma: no cover # package is not installed - __version__ = "0.0.0.dev0" + __version__ = "unknown" __all__ = ["__version__"] -__all__ += ["covmodel", "field", "variogram", "krige", "random", "tools"] -__all__ += ["transform", "normalizer", "config"] -__all__ += [ - "CovModel", - "Gaussian", - "Exponential", - "Matern", - "Integral", - "Stable", - "Rational", - "Cubic", - "Linear", - "Circular", - "Spherical", - "HyperSpherical", - "SuperSpherical", - "JBessel", - "TPLGaussian", - "TPLExponential", - "TPLStable", - "TPLSimple", -] - -__all__ += [ - "vario_estimate", - "vario_estimate_axis", - "vario_estimate_structured", - "vario_estimate_unstructured", - "standard_bins", -] - -__all__ += [ - "Krige", - "SRF", - "CondSRF", - "rotated_main_axes", - "generate_grid", - "generate_st_grid", - "EARTH_RADIUS", - "KM_SCALE", - "DEGREE_SCALE", - "RADIAN_SCALE", - "vtk_export", - "vtk_export_structured", - "vtk_export_unstructured", - "to_vtk", - "to_vtk_structured", - "to_vtk_unstructured", -] +__all__ += ["estimator", "krigesum", "summator"] From 68193314018c1b42c84d4c6fc4a38ed468ac25c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Fri, 12 Jul 2024 00:01:28 +0200 Subject: [PATCH 05/26] rename submodules to be in line with the rust backend --- setup.py | 2 +- src/gstools_cython/__init__.py | 8 ++++---- src/gstools_cython/{summator.pyx => field.pyx} | 0 src/gstools_cython/{krigesum.pyx => krige.pyx} | 0 src/gstools_cython/{estimator.pyx => variogram.pyx} | 0 5 files changed, 5 insertions(+), 5 deletions(-) rename src/gstools_cython/{summator.pyx => field.pyx} (100%) rename src/gstools_cython/{krigesum.pyx => krige.pyx} (100%) rename src/gstools_cython/{estimator.pyx => variogram.pyx} (100%) diff --git a/setup.py b/setup.py index 7368f686..2a98366c 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ include_dirs=[np.get_include()], define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")], ) - for ext in ["summator", "estimator", "krigesum"] + for ext in ["field", "krige", "variogram"] ] # you can set GSTOOLS_BUILD_PARALLEL=0 or GSTOOLS_BUILD_PARALLEL=1 open_mp = False diff --git a/src/gstools_cython/__init__.py b/src/gstools_cython/__init__.py index 6c332893..27f6ee40 100644 --- a/src/gstools_cython/__init__.py +++ b/src/gstools_cython/__init__.py @@ -15,9 +15,9 @@ .. autosummary:: :toctree: api - estimator - krigesum - summator + field + krige + variogram """ # Hooray! 
@@ -30,4 +30,4 @@ __version__ = "unknown" __all__ = ["__version__"] -__all__ += ["estimator", "krigesum", "summator"] +__all__ += ["field", "krige", "variogram"] diff --git a/src/gstools_cython/summator.pyx b/src/gstools_cython/field.pyx similarity index 100% rename from src/gstools_cython/summator.pyx rename to src/gstools_cython/field.pyx diff --git a/src/gstools_cython/krigesum.pyx b/src/gstools_cython/krige.pyx similarity index 100% rename from src/gstools_cython/krigesum.pyx rename to src/gstools_cython/krige.pyx diff --git a/src/gstools_cython/estimator.pyx b/src/gstools_cython/variogram.pyx similarity index 100% rename from src/gstools_cython/estimator.pyx rename to src/gstools_cython/variogram.pyx From 9ddedb296587a217290b2b3919cd823712020d35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20M=C3=BCller?= Date: Fri, 12 Jul 2024 21:30:44 +0200 Subject: [PATCH 06/26] remove examples --- examples/00_misc/00_tpl_stable.py | 62 --- examples/00_misc/01_export.py | 25 - .../00_misc/02_check_rand_meth_sampling.py | 68 --- examples/00_misc/04_herten.py | 290 ---------- examples/00_misc/05_standalone_field.py | 30 -- examples/00_misc/README.rst | 9 - examples/00_misc/grid_dim_origin_spacing.txt | 3 - examples/00_misc/herten_transmissivity.gz | Bin 7274190 -> 0 bytes examples/01_random_field/00_gaussian.py | 38 -- examples/01_random_field/01_srf_ensemble.py | 54 -- examples/01_random_field/02_fancier.py | 31 -- .../01_random_field/03_unstr_srf_export.py | 34 -- examples/01_random_field/04_srf_merge.py | 49 -- examples/01_random_field/05_mesh_ensemble.py | 94 ---- .../01_random_field/06_pyvista_support.py | 58 -- .../01_random_field/07_higher_dimensions.py | 82 --- examples/01_random_field/README.rst | 17 - examples/02_cov_model/00_intro.py | 75 --- examples/02_cov_model/01_basic_methods.py | 45 -- examples/02_cov_model/02_aniso_rotation.py | 56 -- examples/02_cov_model/03_spectral_methods.py | 47 -- examples/02_cov_model/04_different_scales.py | 69 --- examples/02_cov_model/05_additional_para.py | 47 -- .../02_cov_model/06_fitting_para_ranges.py | 76 --- examples/02_cov_model/README.rst | 88 ---- examples/03_variogram/00_fit_variogram.py | 36 -- examples/03_variogram/01_find_best_model.py | 64 --- examples/03_variogram/02_multi_vario.py | 44 -- examples/03_variogram/03_directional_2d.py | 66 --- examples/03_variogram/04_directional_3d.py | 98 ---- .../03_variogram/05_auto_fit_variogram.py | 37 -- examples/03_variogram/06_auto_bin_latlon.py | 90 ---- examples/03_variogram/README.rst | 14 - .../04_vector_field/00_2d_vector_field.py | 47 -- .../04_vector_field/01_3d_vector_field.py | 64 --- examples/04_vector_field/README.rst | 37 -- examples/05_kriging/00_simple_kriging.py | 50 -- examples/05_kriging/01_ordinary_kriging.py | 50 -- examples/05_kriging/02_pykrige_interface.py | 64 --- examples/05_kriging/03_compare_kriging.py | 36 -- examples/05_kriging/04_extdrift_kriging.py | 26 - examples/05_kriging/05_universal_kriging.py | 41 -- examples/05_kriging/06_detrended_kriging.py | 32 -- .../07_detrended_ordinary_kriging.py | 32 -- examples/05_kriging/08_measurement_errors.py | 56 -- examples/05_kriging/09_pseudo_inverse.py | 40 -- examples/05_kriging/README.rst | 100 ---- .../00_condition_ensemble.py | 63 --- .../01_2D_condition_ensemble.py | 71 --- examples/06_conditioned_fields/README.rst | 26 - examples/07_transformations/00_log_normal.py | 18 - examples/07_transformations/01_binary.py | 20 - examples/07_transformations/02_discrete.py | 45 -- examples/07_transformations/03_zinn_harvey.py | 21 
- examples/07_transformations/04_bimodal.py | 23 - .../07_transformations/05_combinations.py | 41 -- examples/07_transformations/README.rst | 50 -- .../08_geo_coordinates/00_field_generation.py | 65 --- examples/08_geo_coordinates/01_dwd_krige.py | 174 ------ examples/08_geo_coordinates/README.rst | 68 --- examples/08_geo_coordinates/de_borders.txt | 492 ----------------- examples/08_geo_coordinates/temp_obs.txt | 494 ------------------ examples/09_spatio_temporal/01_precip_1d.py | 130 ----- examples/09_spatio_temporal/02_precip_2d.py | 76 --- .../03_geographic_coordinates.py | 38 -- examples/09_spatio_temporal/README.rst | 65 --- .../10_normalizer/00_lognormal_kriging.py | 55 -- examples/10_normalizer/01_auto_fit.py | 107 ---- examples/10_normalizer/02_compare.py | 67 --- examples/10_normalizer/README.rst | 55 -- 70 files changed, 4835 deletions(-) delete mode 100644 examples/00_misc/00_tpl_stable.py delete mode 100644 examples/00_misc/01_export.py delete mode 100644 examples/00_misc/02_check_rand_meth_sampling.py delete mode 100644 examples/00_misc/04_herten.py delete mode 100644 examples/00_misc/05_standalone_field.py delete mode 100644 examples/00_misc/README.rst delete mode 100644 examples/00_misc/grid_dim_origin_spacing.txt delete mode 100644 examples/00_misc/herten_transmissivity.gz delete mode 100644 examples/01_random_field/00_gaussian.py delete mode 100644 examples/01_random_field/01_srf_ensemble.py delete mode 100644 examples/01_random_field/02_fancier.py delete mode 100644 examples/01_random_field/03_unstr_srf_export.py delete mode 100644 examples/01_random_field/04_srf_merge.py delete mode 100755 examples/01_random_field/05_mesh_ensemble.py delete mode 100644 examples/01_random_field/06_pyvista_support.py delete mode 100755 examples/01_random_field/07_higher_dimensions.py delete mode 100644 examples/01_random_field/README.rst delete mode 100644 examples/02_cov_model/00_intro.py delete mode 100755 examples/02_cov_model/01_basic_methods.py delete mode 100755 examples/02_cov_model/02_aniso_rotation.py delete mode 100755 examples/02_cov_model/03_spectral_methods.py delete mode 100755 examples/02_cov_model/04_different_scales.py delete mode 100755 examples/02_cov_model/05_additional_para.py delete mode 100755 examples/02_cov_model/06_fitting_para_ranges.py delete mode 100644 examples/02_cov_model/README.rst delete mode 100644 examples/03_variogram/00_fit_variogram.py delete mode 100755 examples/03_variogram/01_find_best_model.py delete mode 100755 examples/03_variogram/02_multi_vario.py delete mode 100755 examples/03_variogram/03_directional_2d.py delete mode 100755 examples/03_variogram/04_directional_3d.py delete mode 100644 examples/03_variogram/05_auto_fit_variogram.py delete mode 100644 examples/03_variogram/06_auto_bin_latlon.py delete mode 100644 examples/03_variogram/README.rst delete mode 100644 examples/04_vector_field/00_2d_vector_field.py delete mode 100755 examples/04_vector_field/01_3d_vector_field.py delete mode 100644 examples/04_vector_field/README.rst delete mode 100755 examples/05_kriging/00_simple_kriging.py delete mode 100644 examples/05_kriging/01_ordinary_kriging.py delete mode 100755 examples/05_kriging/02_pykrige_interface.py delete mode 100755 examples/05_kriging/03_compare_kriging.py delete mode 100755 examples/05_kriging/04_extdrift_kriging.py delete mode 100755 examples/05_kriging/05_universal_kriging.py delete mode 100755 examples/05_kriging/06_detrended_kriging.py delete mode 100755 examples/05_kriging/07_detrended_ordinary_kriging.py delete mode 
100755 examples/05_kriging/08_measurement_errors.py delete mode 100755 examples/05_kriging/09_pseudo_inverse.py delete mode 100644 examples/05_kriging/README.rst delete mode 100644 examples/06_conditioned_fields/00_condition_ensemble.py delete mode 100644 examples/06_conditioned_fields/01_2D_condition_ensemble.py delete mode 100644 examples/06_conditioned_fields/README.rst delete mode 100755 examples/07_transformations/00_log_normal.py delete mode 100755 examples/07_transformations/01_binary.py delete mode 100755 examples/07_transformations/02_discrete.py delete mode 100755 examples/07_transformations/03_zinn_harvey.py delete mode 100755 examples/07_transformations/04_bimodal.py delete mode 100755 examples/07_transformations/05_combinations.py delete mode 100644 examples/07_transformations/README.rst delete mode 100755 examples/08_geo_coordinates/00_field_generation.py delete mode 100755 examples/08_geo_coordinates/01_dwd_krige.py delete mode 100644 examples/08_geo_coordinates/README.rst delete mode 100644 examples/08_geo_coordinates/de_borders.txt delete mode 100644 examples/08_geo_coordinates/temp_obs.txt delete mode 100644 examples/09_spatio_temporal/01_precip_1d.py delete mode 100644 examples/09_spatio_temporal/02_precip_2d.py delete mode 100644 examples/09_spatio_temporal/03_geographic_coordinates.py delete mode 100644 examples/09_spatio_temporal/README.rst delete mode 100644 examples/10_normalizer/00_lognormal_kriging.py delete mode 100644 examples/10_normalizer/01_auto_fit.py delete mode 100644 examples/10_normalizer/02_compare.py delete mode 100644 examples/10_normalizer/README.rst diff --git a/examples/00_misc/00_tpl_stable.py b/examples/00_misc/00_tpl_stable.py deleted file mode 100644 index 474b0f55..00000000 --- a/examples/00_misc/00_tpl_stable.py +++ /dev/null @@ -1,62 +0,0 @@ -r""" -Truncated Power Law Variograms ------------------------------- - -GSTools also implements truncated power law variograms, -which can be represented as a superposition of scale dependant modes -in form of standard variograms, which are truncated by -a lower- :math:`\ell_{\mathrm{low}}` and -an upper length-scale :math:`\ell_{\mathrm{up}}`. - -This example shows the truncated power law (:any:`TPLStable`) based on the -:any:`Stable` covariance model and is given by - -.. math:: - \gamma_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}}(r) = - \intop_{\ell_{\mathrm{low}}}^{\ell_{\mathrm{up}}} - \gamma(r,\lambda) \frac{\rm d \lambda}{\lambda} - -with `Stable` modes on each scale: - -.. math:: - \gamma(r,\lambda) &= - \sigma^2(\lambda)\cdot\left(1- - \exp\left[- \left(\frac{r}{\lambda}\right)^{\alpha}\right] - \right)\\ - \sigma^2(\lambda) &= C\cdot\lambda^{2H} - -which gives Gaussian modes for ``alpha=2`` -or Exponential modes for ``alpha=1``. - -For :math:`\ell_{\mathrm{low}}=0` this results in: - -.. math:: - \gamma_{\ell_{\mathrm{up}}}(r) &= - \sigma^2_{\ell_{\mathrm{up}}}\cdot\left(1- - \frac{2H}{\alpha} \cdot - E_{1+\frac{2H}{\alpha}} - \left[\left(\frac{r}{\ell_{\mathrm{up}}}\right)^{\alpha}\right] - \right) \\ - \sigma^2_{\ell_{\mathrm{up}}} &= - C\cdot\frac{\ell_{\mathrm{up}}^{2H}}{2H} -""" - -import numpy as np - -import gstools as gs - -x = y = np.linspace(0, 100, 100) -model = gs.TPLStable( - dim=2, # spatial dimension - var=1, # variance (C is calculated internally, so variance is actually 1) - len_low=0, # lower truncation of the power law - len_scale=10, # length scale (a.k.a. 
range), len_up = len_low + len_scale - nugget=0.1, # nugget - anis=0.5, # anisotropy between main direction and transversal ones - angles=np.pi / 4, # rotation angles - alpha=1.5, # shape parameter from the stable model - hurst=0.7, # hurst coefficient from the power law -) -srf = gs.SRF(model, mean=1.0, seed=19970221) -srf.structured([x, y]) -srf.plot() diff --git a/examples/00_misc/01_export.py b/examples/00_misc/01_export.py deleted file mode 100644 index e38294fe..00000000 --- a/examples/00_misc/01_export.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Exporting Fields ----------------- - -GSTools provides simple exporting routines to convert generated fields to -`VTK `__ files. - -These can be viewed for example with `Paraview `__. -""" - -# sphinx_gallery_thumbnail_path = 'pics/paraview.png' -import gstools as gs - -x = y = range(100) -model = gs.Gaussian(dim=2, var=1, len_scale=10) -srf = gs.SRF(model) -field = srf((x, y), mesh_type="structured") -srf.vtk_export(filename="field") - -############################################################################### -# The result displayed with Paraview: -# -# .. image:: https://raw.githubusercontent.com/GeoStat-Framework/GeoStat-Framework.github.io/master/img/paraview.png -# :width: 400px -# :align: center diff --git a/examples/00_misc/02_check_rand_meth_sampling.py b/examples/00_misc/02_check_rand_meth_sampling.py deleted file mode 100644 index 58d998b4..00000000 --- a/examples/00_misc/02_check_rand_meth_sampling.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Check Random Sampling ---------------------- -""" - -import numpy as np -from matplotlib import pyplot as plt -from mpl_toolkits.mplot3d import Axes3D - -import gstools as gs - - -def norm_rad(vec): - """Direction on the unit sphere.""" - vec = np.array(vec, ndmin=2) - norm = np.zeros(vec.shape[1]) - for i in range(vec.shape[0]): - norm += vec[i] ** 2 - norm = np.sqrt(norm) - return np.einsum("j,ij->ij", 1 / norm, vec), norm - - -def plot_rand_meth_samples(generator): - """Plot the samples of the rand meth class.""" - norm, rad = norm_rad(generator._cov_sample) - - fig = plt.figure(figsize=(10, 4)) - - if generator.model.dim == 3: - ax = fig.add_subplot(121, projection=Axes3D.name) - u = np.linspace(0, 2 * np.pi, 100) - v = np.linspace(0, np.pi, 100) - x = np.outer(np.cos(u), np.sin(v)) - y = np.outer(np.sin(u), np.sin(v)) - z = np.outer(np.ones(np.size(u)), np.cos(v)) - ax.plot_surface(x, y, z, rstride=4, cstride=4, color="b", alpha=0.1) - ax.scatter(norm[0], norm[1], norm[2]) - elif generator.model.dim == 2: - ax = fig.add_subplot(121) - u = np.linspace(0, 2 * np.pi, 100) - x = np.cos(u) - y = np.sin(u) - ax.plot(x, y, color="b", alpha=0.1) - ax.scatter(norm[0], norm[1]) - ax.set_aspect("equal") - else: - ax = fig.add_subplot(121) - ax.bar(-1, np.sum(np.isclose(norm, -1)), color="C0") - ax.bar(1, np.sum(np.isclose(norm, 1)), color="C0") - ax.set_xticks([-1, 1]) - ax.set_xticklabels(("-1", "1")) - ax.set_title("Direction sampling") - - ax = fig.add_subplot(122) - x = np.linspace(0, 10 / generator.model.integral_scale) - y = generator.model.spectral_rad_pdf(x) - ax.plot(x, y, label="radial spectral density") - sample_in = np.sum(rad <= np.max(x)) - ax.hist(rad[rad <= np.max(x)], bins=sample_in // 50, density=True) - ax.set_xlim([0, np.max(x)]) - ax.set_title(f"Radius samples shown {sample_in}/{len(rad)}") - ax.legend() - plt.show() - - -model = gs.Stable(dim=3, alpha=1.5) -srf = gs.SRF(model, seed=2020) -plot_rand_meth_samples(srf.generator) diff --git a/examples/00_misc/04_herten.py 
b/examples/00_misc/04_herten.py deleted file mode 100644 index 1e1b8a23..00000000 --- a/examples/00_misc/04_herten.py +++ /dev/null @@ -1,290 +0,0 @@ -""" -Analyzing the Herten Aquifer with GSTools ------------------------------------------ - -This example is going to be a bit more extensive and we are going to do some -basic data preprocessing for the actual variogram estimation. But this example -will be self-contained and all data gathering and processing will be done in -this example script. - - -The Data -^^^^^^^^ - -We are going to analyse the Herten aquifer, which is situated in Southern -Germany. Multiple outcrop faces where surveyed and interpolated to a 3D -dataset. In these publications, you can find more information about the data: - -| Bayer, Peter; Comunian, Alessandro; Höyng, Dominik; Mariethoz, Gregoire (2015): Physicochemical properties and 3D geostatistical simulations of the Herten and the Descalvado aquifer analogs. PANGAEA, https://doi.org/10.1594/PANGAEA.844167, -| Supplement to: Bayer, P et al. (2015): Three-dimensional multi-facies realizations of sedimentary reservoir and aquifer analogs. Scientific Data, 2, 150033, https://doi.org/10.1038/sdata.2015.33 -| - -Retrieving the Data -^^^^^^^^^^^^^^^^^^^ - -To begin with, we need to download and extract the data. Therefore, we are -going to use some built-in Python libraries. For simplicity, many values and -strings will be hardcoded. - -You don't have to execute the ``download_herten`` and ``generate_transmissivity`` -functions, since the only produce the ``herten_transmissivity.gz`` -and ``grid_dim_origin_spacing.txt``, which are already present. -""" - -import os - -import matplotlib.pyplot as plt -import numpy as np - -import gstools as gs - -VTK_PATH = os.path.join("Herten-analog", "sim-big_1000x1000x140", "sim.vtk") - -############################################################################### - - -def download_herten(): - """Download the data, warning: its about 250MB.""" - import urllib.request - import zipfile - - print("Downloading Herten data") - data_filename = "data.zip" - data_url = ( - "http://store.pangaea.de/Publications/" - "Bayer_et_al_2015/Herten-analog.zip" - ) - urllib.request.urlretrieve(data_url, "data.zip") - # extract the "big" simulation - with zipfile.ZipFile(data_filename, "r") as zf: - zf.extract(VTK_PATH) - - -############################################################################### - - -def generate_transmissivity(): - """Generate a file with a transmissivity field from the HERTEN data.""" - import shutil - - import pyvista as pv - - print("Loading Herten data with pyvista") - mesh = pv.read(VTK_PATH) - herten = mesh.point_data["facies"].reshape(mesh.dimensions, order="F") - # conductivity values per fazies from the supplementary data - cond = 1e-4 * np.array( - [2.5, 2.3, 0.61, 260, 1300, 950, 0.43, 0.006, 23, 1.4] - ) - # asign the conductivities to the facies - herten_cond = cond[herten] - # Next, we are going to calculate the transmissivity, - # by integrating over the vertical axis - herten_trans = np.sum(herten_cond, axis=2) * mesh.spacing[2] - # saving some grid informations - grid = [mesh.dimensions[:2], mesh.origin[:2], mesh.spacing[:2]] - print("Saving the transmissivity field and grid information") - np.savetxt("herten_transmissivity.gz", herten_trans) - np.savetxt("grid_dim_origin_spacing.txt", grid) - # Some cleanup. 
You can comment out these lines to keep the downloaded data - os.remove("data.zip") - shutil.rmtree("Herten-analog") - - -############################################################################### -# Downloading and Preprocessing -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -# -# You can uncomment the following two calls, so the data is downloaded -# and processed again. - -# download_herten() -# generate_transmissivity() - - -############################################################################### -# Analyzing the data -# ^^^^^^^^^^^^^^^^^^ -# -# The Herten data provides information about the grid, which was already used in -# the previous code block. From this information, we can create our own grid on -# which we can estimate the variogram. As a first step, we are going to estimate -# an isotropic variogram, meaning that we will take point pairs from all -# directions into account. An unstructured grid is a natural choice for this. -# Therefore, we are going to create an unstructured grid from the given, -# structured one. For this, we are going to write another small function - -herten_log_trans = np.log(np.loadtxt("herten_transmissivity.gz")) -dim, origin, spacing = np.loadtxt("grid_dim_origin_spacing.txt") - -# create a structured grid on which the data is defined -x_s = np.arange(origin[0], origin[0] + dim[0] * spacing[0], spacing[0]) -y_s = np.arange(origin[1], origin[1] + dim[1] * spacing[1], spacing[1]) -# create the corresponding unstructured grid for the variogram estimation -x_u, y_u = np.meshgrid(x_s, y_s) - - -############################################################################### -# Let's have a look at the transmissivity field of the Herten aquifer - -plt.imshow(herten_log_trans.T, origin="lower", aspect="equal") -plt.show() - - -############################################################################### -# Estimating the Variogram -# ^^^^^^^^^^^^^^^^^^^^^^^^ -# -# Finally, everything is ready for the variogram estimation. For the unstructured -# method, we have to define the bins on which the variogram will be estimated. -# Through expert knowledge (i.e. fiddling around), we assume that the main -# features of the variogram will be below 10 metres distance. And because the -# data has a high spatial resolution, the resolution of the bins can also be -# high. The transmissivity data is still defined on a structured grid, but we can -# simply flatten it with :any:`numpy.ndarray.flatten`, in order to bring it into -# the right shape. It might be more memory efficient to use -# ``herten_log_trans.reshape(-1)``, but for better readability, we will stick to -# :any:`numpy.ndarray.flatten`. Taking all data points into account would take a -# very long time (expert knowledge \*wink\*), thus we will only take 2000 datapoints into account, which are sampled randomly. In order to make the exact -# results reproducible, we can also set a seed. - - -bins = gs.standard_bins(pos=(x_u, y_u), max_dist=10) -bin_center, gamma = gs.vario_estimate( - (x_u, y_u), - herten_log_trans.reshape(-1), - bins, - sampling_size=2000, - sampling_seed=19920516, -) - -############################################################################### -# The estimated variogram is calculated on the centre of the given bins, -# therefore, the ``bin_center`` array is also returned. 
- -############################################################################### -# Fitting the Variogram -# ^^^^^^^^^^^^^^^^^^^^^ -# -# Now, we can see, if the estimated variogram can be modelled by a common -# variogram model. Let's try the :any:`Exponential` model. - -# fit an exponential model -fit_model = gs.Exponential(dim=2) -fit_model.fit_variogram(bin_center, gamma, nugget=False) - -############################################################################### -# Finally, we can visualise some results. For quickly plotting a covariance -# model, GSTools provides some helper functions. - -ax = fit_model.plot(x_max=max(bin_center)) -ax.plot(bin_center, gamma) - - -############################################################################### -# That looks like a pretty good fit! By printing the model, we can directly see -# the fitted parameters - -print(fit_model) - -############################################################################### -# With this data, we could start generating new ensembles of the Herten aquifer -# with the :any:`SRF` class. - - -############################################################################### -# Estimating the Variogram in Specific Directions -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -# -# Estimating a variogram on a structured grid gives us the possibility to only -# consider values in a specific direction. This could be a first test, to see if -# the data is anisotropic. -# In order to speed up the calculations, we are going to only use every 10th datapoint and for a comparison with the isotropic variogram calculated earlier, we -# only need the first 21 array items. - - -# estimate the variogram on a structured grid -# use only every 10th value, otherwise calculations would take very long -x_s_skip = np.ravel(x_s)[::10] -y_s_skip = np.ravel(y_s)[::10] -herten_trans_skip = herten_log_trans[::10, ::10] - -############################################################################### -# With this much smaller data set, we can immediately estimate the variogram in -# the x- and y-axis - -gamma_x = gs.vario_estimate_axis(herten_trans_skip, direction="x") -gamma_y = gs.vario_estimate_axis(herten_trans_skip, direction="y") - -############################################################################### -# With these two estimated variograms, we can start fitting :any:`Exponential` -# covariance models - -x_plot = x_s_skip[:21] -y_plot = y_s_skip[:21] -# fit an exponential model -fit_model_x = gs.Exponential(dim=2) -fit_model_x.fit_variogram(x_plot, gamma_x[:21], nugget=False) -fit_model_y = gs.Exponential(dim=2) -fit_model_y.fit_variogram(y_plot, gamma_y[:21], nugget=False) - -############################################################################### -# Now, the isotropic variogram and the two variograms in x- and y-direction can -# be plotted together with their respective models, which will be plotted with -# dashed lines. - -plt.figure() # new figure -(line,) = plt.plot(bin_center, gamma, label="estimated variogram (isotropic)") -plt.plot( - bin_center, - fit_model.variogram(bin_center), - color=line.get_color(), - linestyle="--", - label="exp. variogram (isotropic)", -) - -(line,) = plt.plot(x_plot, gamma_x[:21], label="estimated variogram in x-dir") -plt.plot( - x_plot, - fit_model_x.variogram(x_plot), - color=line.get_color(), - linestyle="--", - label="exp. 
variogram in x-dir", -) - -(line,) = plt.plot(y_plot, gamma_y[:21], label="estimated variogram in y-dir") -plt.plot( - y_plot, - fit_model_y.variogram(y_plot), - color=line.get_color(), - linestyle="--", - label="exp. variogram in y-dir", -) - -plt.legend() -plt.show() - -############################################################################### -# The plot might be a bit cluttered, but at least it is pretty obvious that the -# Herten aquifer has no apparent anisotropies in its spatial structure. - -print("semivariogram model (isotropic):\n", fit_model) -print("semivariogram model (in x-dir.):\n", fit_model_x) -print("semivariogram model (in y-dir.):\n", fit_model_y) - - -############################################################################### -# Creating a Spatial Random Field from the Herten Parameters -# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -# -# With all the hard work done, it's straight forward now, to generate new -# *Herten-like realisations* - -# create a spatial random field on the low-resolution grid -srf = gs.SRF(fit_model, seed=19770928) -srf.structured([x_s_skip, y_s_skip]) -ax = srf.plot() -ax.set_aspect("equal") - -############################################################################### -# That's pretty neat! diff --git a/examples/00_misc/05_standalone_field.py b/examples/00_misc/05_standalone_field.py deleted file mode 100644 index e467f043..00000000 --- a/examples/00_misc/05_standalone_field.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -Standalone Field class ----------------------- - -The :any:`Field` class of GSTools can be used to plot arbitrary data in nD. - -In the following example we will produce 10000 random points in 4D with -random values and plot them. -""" - -import numpy as np - -import gstools as gs - -rng = np.random.RandomState(19970221) -x0 = rng.rand(10000) * 100.0 -x1 = rng.rand(10000) * 100.0 -x2 = rng.rand(10000) * 100.0 -x3 = rng.rand(10000) * 100.0 -values = rng.rand(10000) * 100.0 - -############################################################################### -# Only thing needed to instantiate the Field is the dimension. -# -# Afterwards we can call the instance like all other Fields -# (:any:`SRF`, :any:`Krige` or :any:`CondSRF`), but with an additional field. - -plotter = gs.field.Field(dim=4) -plotter(pos=(x0, x1, x2, x3), field=values) -plotter.plot() diff --git a/examples/00_misc/README.rst b/examples/00_misc/README.rst deleted file mode 100644 index bef7ae57..00000000 --- a/examples/00_misc/README.rst +++ /dev/null @@ -1,9 +0,0 @@ -Miscellaneous Tutorials -======================= - -More examples which do not really fit into other categories. Some are not more -than a code snippet, while others are more complex and more than one part of -GSTools is involved. 
- -Examples -------- diff --git a/examples/00_misc/grid_dim_origin_spacing.txt b/examples/00_misc/grid_dim_origin_spacing.txt deleted file mode 100644 index 024928d3..00000000 --- a/examples/00_misc/grid_dim_origin_spacing.txt +++ /dev/null @@ -1,3 +0,0 @@ -1.000000000000000000e+03 1.000000000000000000e+03 -0.000000000000000000e+00 0.000000000000000000e+00 -5.000000000000000278e-02 5.000000000000000278e-02 diff --git a/examples/00_misc/herten_transmissivity.gz b/examples/00_misc/herten_transmissivity.gz deleted file mode 100644 index fa1d00e3ae61c3b8c7404a58cc75319b63400e36..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7274190
zgGfY&1iveUORQQ;BcoH>5I#Vctf}16sqbXxr>aKE=rn@}$~XqJF(4YRdP@u`my3+E zVus9$h~XXwKC{S^vSC;S!`yA-XbeZj`7lsO7k3KJUce1(=Dtmrx*xYkS(gt1oRpLw<1TQ<34dkwbI*ufwZwkYz7Of*GoXw43&LB z$`WPVZj0#E+Sm*5VwrBAxbzKyjf#?R0HQ$4D$W4)A}lD0BJpmqLAFY8H?O+v%n4he zadtdshYW29gltGh6N*&4++G^@UeH%X;>3Q1Zj3Gxb%{y{g+%bL-83? z$9mATSS6XcC2I(4-J+HuLZDjUr%tNt`iRPGFziAQDQGwYhgOrBro+24`MGa8FE0Re zK#RZJg;bm7@sez|Axaohf762XWtox|n zw#d9p$6d1nz~Sgk^bvNRBKf2swEu{&0G^0L%m6fy>`9IZ&VE&4;2pGS^4_}@mzOni zcSFx>K|`<3529%6ID(Gc;EJbg@`6df8TQr6ZC3J@OeEHaG-JkYDAdBq?jjkrG+az| zStBdPbUWbLo<}40nV3*E+!7zj5MY@oT6}Q!?t;)(YeoBPH_M7@2l*bZHmOR(ilN7H zkomZZmhw+U!jp9KlDo>)-QP@gETY(gAxf?gcB+%Qjcy(#0gaD9Ji5~WNQnY6&5<{+ zVwHwy|Ev4xsp7xGhg|bQeWW9%M3Zmc9!){sd=1D=7fN`8FL>HJ*t?%?7vBFKPv1x`aoZ!x?tO#4Ze~B0+ET2V-CG zX(yEz^}JH}Km~m?Cr64z6fx5j`%^9HX~DMrc@MQ{`B0zIxD0YU6`ac-(Y_96nc!YW zO92OsP4r7yWae>>^bvWC6r&WO?JA72&ywnnap5v=FuvxC*mR{>h~jPTE8z?{YU^tj zOI%@_a`!xGWQHos-Dte>=8=X05uAn1uteh1@wGUwN4=G9VkRZA6bq60_@?`{OQgax zVFb&Yk8O;LtD}{&G{OTd_)6yC2$w2M>DL-OOYOUS^K5{sr$;?8K0$>|cSFjvRtkeC zvtUGNeI0kbxr|cUlCLPM$K7UL*UAfeSg8Lt8%`YmF#PQ=tktF#1x)e??>x>C7TDN! z?4=CVt}(%vWnz>?gnfex>F(OEznmv(8=Ed12?KGSu!0w4zhfY)aKV&ujmJy;Mg4Dp z*~7ybtx{bK%Z+Z0^(2UVI&t;|d!YrfBZ1vN$W4`u#xgRs0q2SqhqOq0uQ;|YOjYLWcu@8 zDz1Qy29hX}2&l-McWWq!WotnEl`WyuYSal#4j2ho=(F}6*V%*LBYf~cFU zU21I&v1VpyP*JHouWFeg4xzIMT9CW4r;3<(^X3%$Eo@LOYpb%e;1ol6X=`DKv5$)B@ICR+pU8 zglUSHEfT8@U1>?zUA~UfL zshh@0k5w9TXk3cP_b0x6KENp0^Vy8ltukO=fDY9#qBIBzQjh9dMxO@crixf8`v%KY zjw7(Zvy@|Fo7aHW)Bu_oL<|E-$f;4MYMeEdnV$=BKjx7=Lkc>+92b1br zmAgrQuvSFNC7wI}?sIC{?*q8y2TT_8`*Qlf8MC^^BQ7E;WuntiRbASD&jNG}bcflc z8J4+$gapW=Oje>@sjlQ6ozn=>r_FzqoTHZ>FylkA^T`T44Xm2|D6uDnBLklZx1kie z4S!WK%~(nr=@l{U6noNGs6}a>2d799p@3pRNuVq~ZO_h6n7$2U)A&om1;=J*t#aT< zFd+KQ7amA`+6%$35fVOF*!r6QULV9w+=z{Bj z2)6Io>`-YX5v-IQDv;*2jJs`sNp;3|NgPoefG1VM9NoI%sTd-<16)zrXLis9rXxF?a;4HEa%i{I9Se+Q@IVOl^887H!BH-)nYlvc)XyYmUb^6 zD!=(wzG?hd11`N$jZn(yn?)<{q$9}rtjS7EY0xbC~8X5o4ZVKN?(m*EIEN$#Gy|u8#>dArhfvgjHn2iL(7MX>H>*u;KU7KvU7j`8AnUGYn=hUHXb;WE`AO$GC5E$p?lV)YQk- z76-S8V=F^DW5<6?kgU~(z)00-lq=xnY%X1~JHiLyUO6Adl?-C`&H$Z-@D3Fd1e zJQ>4j%-ryB2f;EmD)M~X^lD-~-_w*&8p;miIQRH~6L9WMf(6n^(;UWTm53BYhLb|W zodz~4Sf2toQX6!VX0TQgyV#~m(itW#*HW@ksL%@rU>a@}IR*_T!m&392MswOqSU;3 zd+L(#z@_s#Ya0Z68MWDD^v!e{)a`W!ZewVYG-(BrW}M5#$TW>5ElyP9H5akbP#EHe`H7%w(#?aR3EZn}2`4R<86FnNhinjZNOzCMTx#`< zUI=&ayXgE3aR4x!McZj=s0p5l198I(o_{g|iE+SKxr`!Vy*??`kI58vh2*Oc6~cbJm0EE7&YSe3?~e+4Y%TFL7s0foTr#;e9LhVGls zY{YSjMKOId?-B2Jn%VYD6qWN)dFf926Xi8LSSusfyMF!kcPs z_SIf2+R*KboMFTMeUHh04ayOP(2 z+iX6FkoQ>E8zXgh{?r@k#`ur)D?Dl>ub zRs(_|nuK4()*Ak}F!*N!rkMFu_1Cy8 zcyR_p9{mzGhkfsQg=KfrCB?;+>17@_jt=bK0IfB%ar-|giB5U7qUKrF@A6jDCi z>n{_IgsdDdy2!=CsNsT4*G8>Ou0(d%lpTsOO&N`7p%fOea-*@XMrHm&B!uVh37&VN zuHF6g;qK<&vc77OAmtq^>@(Qw=s_1zWcqZ>75UKkNB5i^)zKwna+tUpJN<*u$dmShBa zIovdk-|ulXggc4)K5dTnACcuW8@9fmhQgBtu_1{4X(*^AWCO$p0f(XP8?b4#AOygM zFy{+D+Uxvuo4e!9Ba3sTSdwZrx=A_9HeF;%uaU8sqNh<%*)R0Xx5bH6IU}esDXL~; z=Aw?H$_b0PT0hLnpwn<4KP~g;4Dm}6?cTpH{35_R_Mx`TV6{JL;R9K_wN0!Q7W3H9 zXBhAKA0+sEkIvzIyiKr?z$hpCYAk7}*GZ6zDkq0?D}gX!+u|($ga*|jnfvg6m~hAl zGz&QYVZy6K)R5aH77eqzT?*GSmJ2(6q^0oC&u)5mC=g4xui?BddDK+OLWqM-T-#=S z>a{RR4v!3y3cZp$VxMCIz2ciE9L)CFh-14r=bIAi=%v}d!jQdIA2UD2^zJU2_YzOe zFe=WCkp*TC^86|r{7kUK{5v^nV-Tdq>e9hlMxWN3X>HwZ@?*e|ebh>_mfgAllbZh0 z5GcPuDmtPl8_4Szy7wMsf3^wJ2 zibSR;A~E{TvT4#4%Xn+`cM!>jWbZl_N4QWcq%7(p^O`D&NGu8Z^3z`Ny8d8Ednamc z5TT(^vJUrRcA}`$fm2FWfC)F_ z5KAZfv6!o&ZYZWjc@OOvnok8-Te&gWG*9bTr{xmOk*kbJ=41z zP-DKerO*i9(iQLU{=3ll=nEF{Zfz#x{N&w;uR6PAT0;sVKdr7EgPE>K)ZtXsvOxDB zn}d>2tn`A?Wf*$rUX#Zd7A*mY)ove_JHzX&`3R%HrBu8GzA!9sZU!||r*ST$(;zjd 
zf6#CC;d9#sC~!DV(@KetTulWAOWla8+pwk=glnf6pm8^5euQG2kdC07_52Mu0LYkOJbOe;mzsWs zT&NL#NSM~YVAe~XvYS!OEdZFB$u%8bMA3Zotsq6!1Sm3*Q)pwPCX7BOVD%+zj+lUZj38!K>GDyVAnmR>c=sl^)gV% zwGQD10TG$O24L2+yChbVKlpv}oR6^I(<<9pXXwsVTr9N=M|em;&N3GBYh9UEnY9$l z7D+>$*5^bqy_(4g7k<#^gji7=Ld}x6U5zvs{8fW?Obk%Q++sn|gN6Q3WK+}P5pDks z-+K~Hd;hFPJ*KJaG`lbx_5Q2m{uQ=Nb)i%fOr~rYN3*UU#JtKs#Egk!0WT_Sf(g7Y zs7m`bs?wV&nbfiDaWB)dSBP=WM6% zX&)NjOA5QBLFq!K^kaALxz z!K`e8M+U3SBh_2jpH+I ziR9Yr{hPZ@%O2?m11`0b0$ip%-fqa%jg&K+-gh7ZEG*aQ^n^Pcbk|7TOomguLGXo} z=@a4EYv_fsWqYika5};b|ADwYGZlGjvhm5CL>1+^)Ek zKqoL7`pQuz#GO?Gv7Z=X(2gGInW(X2(Q68hTlC<#CG_7!X&kOjfg3891TS?q`Y$xRdY13A+jsNgku`U z%~Pu}NGhpe;HWcPFv=P8mmbW9@H9$4zi{Gd2FnmZOF3tUFo4Ie-gkv!GGl@Je6h2fm*#Pq2@Oz8!9 zYlL%S8VAILBfcG)FM^?f**=!UoofWD`}oXgj%Q1MFhjM`?y*hHrw!u;Kf`b?Z^q?C ze}%F`q|qlioyLUvFAh}B@aowiv#FT^NIe^6(qX(&uMi8j2$~iWkI1L$3m*E|Vudf* zY$`soH7jPasWozJHRMc@fA6smDB#OB0NlYD4xQ+MM^dFjcZamdSM^Wf+6DEY%B#i!ni`94s zW2KEFWyA3sIfj|Oi51lV5Sn@|%WYDF^~f@dnUQ^*VhqZ#+{mGtdUj^p(l;Y9>5OMb zSTNz`>;)o?Cd}=H?CHP{XMFm7Uh@V548l3U3#xSw8)6onx=^>4`}~Uf{zqkzUXKaF zl8i2~#-Qwwrk&PZQ4c>}&P(7j(hc>L_K2$9x|SrvtD!5c|_2D9N? z)b97^h24g6cDDkyHWnJBgC)GHOU{^l5OlN#ml)dwG!;t~8vwX5ZKv=>#@`NykmwPN zvwwrjkcxrbBe78tp-f{>DjRKbMKVTjp3tFX(`rh75=gqMiA!fIUglMU(AJ_up<8Ht z3(vD09gpSjaS?EcGY?>V!cq`1r=wnwd{BD;1RabHs?~W@FPj$QHIB&7DVR2kH~~$) zGVVFk+=OM2WcvE%KqJIDME@LKXQ5T6^}H(H;8frj0AdAuYlcFy*he@yqoAUmD0C8q zt%KmD%n!L*9!?1Z!qVI>xK}aY(sly%U18x&$Rr=@gouK`A9N`)J$eKVKla^W(d#G(nEf;~sJ~Q4m zQczhBs3}ANKsljW0h^>j!$SxS*MrEZ^w{`<7x6=N4hfy!Q09>9WKJk{1Wop(BD8m~PXt zY}t%F&$7)qOnQpRW0)+eW*kYEHgRGcjtWU5_mG(V?G!okMD>Wf*}H{iu~gzZKx@QW zBl!rgPn5A>vyxmZLrvu8YQ6$=Fl{HXzzwTFcEh(<>p^B?T?|x{E^;-!yI$d~9F*>* zoNI7=ggZ4vi&jeIs6`@dggKRlbUbV`#SrM`V|!lAQiT97f~B(zlZqZ>ebm)dKd?Tl zS|9K(bWFre!<-lM2C>PE^>7XwcDQX*-dQLK^lF4MD{*~C4F%IT0BaxzNw2ZpV!FsJ ze{s&BiKJS1{;7gIxPq(B%qUi$7^Km8Ob03BnF6vgUs_G1}N-nGu=L6qYONZ6qPXBbbgqwX%9SXaF{#>n#~?R zuxfg938J*Wr&3<$XLO+16dE;OZ*(#yZ7)KYX*gssMXH2@TcTOKRjb|q%8Jh8v`>~B zdK2MrY;<}c;Ghai{fQS9u}G(ShI(DZby5xoFUpdLo`ud2@|8^kYx5^+atfP(8tpB| zf(e=|4c!bKz`lM^Y5gMXG!Qf$-Ux69rAeEp>Q4BsDrX{{1W5fk_W$k?Z%d&OW7$PQ z3ub#4<8CAHYB2VSP~p&O?A9iK!bBitsWZfiZqwnNRL>WrhYfuLwG$8MHfk9fS79o? 
zUm`O38~Qs|htt#@1#G@~95mBd?m%vo;($mQvp_sLHl2oHQdCmm&VtYw`5tnPv>+C` z87H=G-LMEk7AW!5Oodf#fpfXjoSg8z-dDg(vOvV8PgjtURU8Vxr<*j7Sg!bmrnY$x zas!P{^X8lRK~2%P>NGW(#}CDX^hGpqnJVI9ax$Kf8gk{$6*3IRq!Ixrh#DZzOdltX zpv@xVKW?!C#Hx@5&TDL;D4!_HhcCF-O@ehbhRf*mhQhFd7;i+Dkt^ASj?(xRQ+EA_ zJh%ke;L@2ayZlAPk z;zS~f7GLlh9a+BlRmGzLHOQSPk%$99^{`--Y$1i5HfPQ?|M=bwR&o+*DBp^idEK^=IcRpFGh-2t;j*{N_N?RqDB>tUT#G)SW!t_mk z(aEFwM~6D4inxvy2l9Mq+~AtnMZ%Nr79#+t;$h0~71G0w6xD4?^pOqDg3#4>!Yoar zw7vZvEve`Ea#9YY*hN(-G&l7&DJqF`WrDR3SX@`MgQ&zD z_`>%BnmvdU#X?KdVv@KEd|~1v-P8(Ht6`8WB>-PP(nk35PpML~jQ3-qWF~M+{TW#{ z@w(ztJNbv`=IKXKHk4N(NG(KtBE;tm7DS)w<47Q80Ijb6l@WqCu}R|>#Sqy0R2qjy zb0xz-6{naG#ZY03tZ0D1mcZipNnoV7)c%x#ZiR=n!WQ**5kGC43`6+2YoHGh8||}7 zd(d&OL(`;I+3uuEOWXpuiGzr|4UF#tw&PEtG`3V;^2YWmp3iqLR51Er1LswKRdCd*Gt3Lxn4Z9jY4@eY=&sbHQT%F#RBrPL)T zT*(K4i;Ye@)8tCxnNBOFOO~5q3HCg-3GvfB(0;AIr(}>zMFJuK$O<6{D=e(3fMEQ*B0-dK9``f3rqUZYzX3A2){ZJ`(rWK%l2oeJG- z{9;&ARA%zx2y8s8i=5ns>T%Lnv+Xm*4&k#76QOhhR*Zj1a-$6jGOyipT71>Y`g86HK z7Zubg1y00`Yi;setN56Anvwu~8DS^jtTDc&CPhe^!l;sLn;65_YTU66xb(t#B}3EL z&OwK?OPB04h?H$25zS|TAi$I6Q~%ki79**~qyE7nIK^vW`Z$R-2wT#O9_C~LLe8AV zTCI2-iD7SJr%WK}4i^q0r7&x$`5jvE0}>cfU?Tx`b4qAy)vL{BT}G`caQwn>)_rP} zfYS4+uJR>@*$0@3+pVKm(BL@&U(Nb=PxxUbY>HO&9EcM1?UP7p6Q9N%_Bi*h+AF*E zzk5Z`O;o^}juFmy^X}mMl{3@SFJBwC(cqgW+Cgumn9t>8sA9jggZ$ z#1*kyl;wJD@XI7-3-!!;T)oqHSpd*54M&vBaS{6ErT{;F_ zU}F7Ai4_+OLoRU}2Si1&P-O$sJFp)j5uB-HcJ2nTr8-*eVdyU58V9}1ES#UJzU z8D5zs4PbBvmyy$%Kt%yFp$%uEZ{YOb9GlVXS`rs=0*lBDulOX98H^KzWYBRv(Z<|% z5iS(yPy&=A@QF~(K5hE*FyGVjsB~U&qR>7Q{aJc75rROJ+%VzILC_~HoS@ko=T)T&hPet zzy;NocAD@FxoNynM~SAwQXu>+0a;xMZ#|+qN4ATJT&WfOAuQM7bvC8(y8HdF7s3yk z)*ckmj{}=S3!{T3vq*j#ntDew*jQ20C6VUCr!R^VE-V9i5^~0p*iIC=R7T<9Uze$Q zY+FPVKfIY>g&*r!pH|_N6>5PCaQ26#95DRF2jB%2A<;y#QLA5yHQBXfG*pD$G!(L! zDPyp#H?fS7bWLkPq~MG^2FJNE3?u+1^HHrGV$p5@0qIOTZ>`Z{B;0tYc}5H3bV$C7 zaWuA-IgUOOC=27?jjwUo)D0i?JRNghCq+(0xM>(j23+E$Yhh|@7%g}h*IoZ6T>F3M zG%NvSwsreVTC&q0>iZGsLg;}XllbTM8jZ+V7lG7+n_d^3;y;B z7*C2?puz;+N?X#!ZwxxYra8bZI+N#{)9*%wf=g8#zOC1oh2PkVHeEwvB9QL2S(!9_ z;x(~>)l3?Wwuy1KQOrC;-4G)Eff&q!G`BJ!IR%a*otP#@c?T(u(Z(#X1oZm1(%&x! 
zxq(^qV{E#Y9(^s8?jvu5FTCKqP9*Vkctw+zdDL;C*oq&FGh}-0*u>a6+k}Q|x}x<4 z;$WdYCi!q|=#OPrZ=zB!v7*z{PK3HCpDNa1n6JHQuVOD0geWx7l0w5%tz4e!K*Y4C ziEyE*C*37xGfKMcN6E3RlG;*i?~P9KxS6Y|FH5KfEuXrJR!I2-93mf7t1p*5i51&_ z-lP>5?#&BJ5D187MoFE9E+iRU%l4&~%9rsUyW|CpN23h%b$H`KWQwM>MPf3Z)jvOL zeJeb8A4XM(`jEhb#<@DIeP&w}RxaV{P}(r&dbO90D##{AR0pGS2Rn?~pC&#+oEXvd zZAK}{aF&>rDV`y|t_u)1)JytW4MMNYP$6A6=9X5C9%?r@;_I=SN1nmn-SD>$*(*Xd ztZ3NpYaBu;;GpqraN$YWEt8>Bz)rMktrL8y0L56^GPqbtdZ_c{-8oT|Si=BxQ=>(( za2*@_cab_-*KMRc6JZ!3s=qSykNLY~t zMTZplMU%8^xK;uoF_3p~aa9<@6E|#3uuX#nGBQ>bCa(Vd;W{ffPdkI2nu#+*zul0- z#Cpznw~vhUryN942mzEDqOAeU0r;#mpQv_HBU>UELDJa?ZV(K48_+gmYAp_!lJOW@ z3*yi+tte%G`XxZ{2Q>Crki`rZ&B$m{TPlMGPRGGE94apqbGx#}q2|IY3O@rn<(N*i zX&UOi% z_u>wggCjy8OoqyofZl{oWb>%P4(`gnjJO0#6wSMufTrQQw6{f;ph{OYtz-NnkgKC0 z^G+tms}!|S|DfC`V3^bZn%UY(7S6V`tdcr_p|w^(c1Pr6hD?P9&z%!a3H9Aa#WT$W zz=NatFP?U$`y(o{UPFRO z$z2(7g-#h;a6@GgzXRxcge=o6gW&0pi<@ABa?`}_yKwGm|7X(MM1^dY;?`LtlYP$E zU9B>K)KN|e_%u|8V)aP%0XGV!tekLLr;w75Pdruj!aOh_gml6e0EVfI@K{mUT}1-8 z$3XU$!Nh$BA;$!O`V|YoL~K}IH&K<$uZUZ#?j-)saOlPTFh@3yqS)-vu*J7S#CA0u zN`i&8{V}svCkSYeiFe=_GNeG_xdPuDN#8UQ^u)$M=4BLqE5AHW9}f+9M9ty!$zhnI z7vI>rWbl28H*(;9$oRlIKf~bh#q_&nnQPAz>2XY8P5cJU8GgV2+(E2Yar6`~3p-~mdSwt+<$ne{K6>pl}$i;@rXI4C`khvS~ToJd!NU0{Y7H=z~ zjL0mIG+Li?-D=*MOpH`n>JU={dlFZT0hfxty*>_H`CW8+e1PAcOX+JimtS4&M=AxL6l0b(>!>rQ8g}JbP_Z(7?R82UjalTbXFCW!P{<_U0LB) z##QoE8>$X3y02&Z919(jo!_b}A2hQ)I9?I~5Dl&keXn;0bvks)_$%w5FtGy!In1kR zloglCy-Yg@*=R2-m=;Fgl(8(##4vCW>BD?}8n<=|r%6tRbMH{RLxPTznZz*0dKlFV z7R`^vlF{B9fR<7Wh~AoMo!iBQR<1U8+Huk@>8%- zZ(rm%GHbVNVo+yL7)A}*ntH;4z>}ect$L6pSPr z{ER(O>2=fz(rbdV>vUz5Ow9f}D|(Yi$HX`&0V4y5hs@;03^lc5^6!bXPE2_U5O|Y| zfXUuoVZA(c0031Qxsm~a-l)tX#{z7|`0Wfr%d=;%-_>*V=x6UJAh1`O62Yd-H*?`* zK3c(DMwV~g(Uvu>nq~p%O?cWjowfi3zdpqsBAryE#6_>Bbn2$2vH@u|?=xo52#(pP z0jQT1fOP?CGhfTLj0u>#kHCgF-q~c>jX;fWMM`$#cN?~n08)pgL5!)3o@Xvk(}K!G zWU@?QXEOSR9Tr1>6Y~+nl=PD?$TWN~y<9W@kp0V8_K(e04M4X$|BKni6Vi`_O%z01(>I7~)?q(99@*FHy z@E%Qni^B`ziDb(M=ppQc6*mNfUgNO*R}Y0q)hd|Xc9Tr8t5Lzr075CB21XG(?kZc8 zHueHQ5bdvA!{wF_k$8a@%r&S}jR&ROytT1~N-CFsDzDYjn3pw7^6Sp$@xdtjPa6I| z4Jb#4QY_YjnWKs<13{002W{v`2jd zkS^_@Rf}VnS>y(st_d}0o=Dx|l70=jNk{hnQ4~- zVKPW@SoZHy@zd5Gu$8TLa)V+M6_OXAfLJD5fS(xGV;@V&4h6dSL#9uhj63cP`Ku1x z9m_sY%qzQ(swl&9XQt1lcto@yE_%2p@uVx$5y};~oUIhpBDX+eugfk;q~R3%=@qGc zg-cAP0FPTc%|p6j&>PQ{dAKzg5D_z6zSV^R(N+B95KjC}+&xwM5CR!0NE+9(9 zQm%EH1fNYDF-*=lU(+R|!dvuIb^Nnr*g^ZEd3t4WhmR_dMe1YhI0POG5wTRTRV0K^C;3potbuQGk5D|qzGEmR1-02R>A?8fdhxvyg2<0poc2yQ#F{l-E8Gf`NF`>Tz;iHoA(m~ z2Pq>l`yOLr4yw{5l?kRAr*zqL6scMEVS`gSC#-nawNbSd+m;H02z5_N_ZnX&5Z?50 zBCljcZGyWuD?t?p2YXdsg$7rGT> zUMJ_A;virq2DVaKQM6nZ`#d6`0oN`q?Bmn@9I0_=yV>_Jha-7xUv_F6KV6++YosdL zc6s0$ig;15$mE$ehkgO zvmkWLX

{BN&aHA=cSSEXIK@R)qLVUx0I5=&&X&y3L2HEs`V0my7u3Imh{pTX|@H z345@$V-O*}`>^5}(8D#&t$-s{mD`bn(DAS@GdVS%hJJ0RsXzSZe?9|oWUK?GQZ7?M z`$#F1zhiB}L0Jux^<^Xz|3oGOSXWRM5g&&7ZhrWcYZpxmC>~cMSV$vqiP0!gtedPS z4hks+(;j8+*Z$Ns{|`19RhCU0X)ZDH=VBwnH$&y~Zc~^Izio9CemA7`B*sZ?9*3Ad z30%!PSRWGm0IsgAb0P4pI(RD1?-6DYc=5uJy8;I9K=6m21oOPly9H+!Z5igBNwhSV^*L<~w&$IO@(li!V7z8HV=uVCjM=5fog_7>2$W<0 z?<|~)^=ehfi2@>f6S^EGjbLQNigt)|~8A#E0C zER||)4#v6elZA0>37w_uB~h$q2|uiagl?(H;tJu?0lrnXSvV;kC(^Ir9?eHlWk0K) z&x!6e4sD}TQYkvcsnHNSaQi4`1 z%H}eV2-3XQ8+JaiD-yl3IMwk$Oku?uNpU$?8tH3pK6&atW$7x0c;tBDgX@}d$ZJe5j}PEWFHG6U=k_}JQx`3_ zS`hhAU)k*e!B1ZRK`2p8&N{#a#{-n#YJN!2^MCu3a_}VX&o7rPAw3pVbUyEpX8Q27 z_m9QEP{5gmtqq3yifz2@D6$vK$Y|bNIFVT{P`(p8hOVSD58?IMMYVAB74yU4?@EeK z`E!>Xem~-D}%0uBys?NJLzaQi>@_NkWtQ(tGcjQG> z6?dC~19$MKADw0wGbzoIHo#n7pFXGVxs{gXsFg41yvZnhAM28>L!Vsd6)%`{XdaBX zXY3!4>#@ib1*)QG1TfrWoi*97ptp(~dYNe;!IGk>(Ep%@Ea=%XY|OHeeER2&xTFa) zff3#^6JVozXl{bUBz_%u^9lM-T5j-ABjp$Il`az_6;BK0`DI}!UYR#gU!EybP(dQ` znZ{hMKizowY3u`N$Vak^P z?O!ACcZ0wnba?0gz^$3A=w*;;M8UBwEEW?AgIzR2@I@>-2%Y14bfzQXjAG*OZf5nz z%^KN59u9mO4!2e{i@dsLTGK-Evb-Nzj$V;eUy=qKz@{#G1``AMg&t#&tKHf)ackE2 zqbt+ZnwayuCme~_Mr|lGBI|sBdN5z))*^|6Ut;7IX+_MjkXae&FDTZWjF##{b5jd9 zFH^9+4CKXv67Vg5V8`RITS_)RHT~z| zHqqRvE0@RibKA-n%ymH>VVoBm28%M>zI+DuW1`07MDE;F4Q>O1KN!e6;zQ!Pu(3Fc zYu?O%G=E^k+Jmnpf3u9MgH z2b;@{>=H~E-Z(`edPG$+MWu(SP3*6iOM>oIBAaU1d_uJ~=&S2%VUV>7mMEow#n9N5 z&6s2#M+lK==IczQ+<3(dlXKoc^l#J9+Y7petACHBsY0?1P>)VV`c~%Hwt>WpP`+=c9>C}St5iU8*i?T1WP@Y<^PU~T_ug5Nx{xPk33wuvr7 z`NM+40x|1K@|*VHw_PTYBnz?j8GP#4gRjA6fdvl5#a!K-=8%ObZfOXV-)UpwdgedW z3|(Jd@lQ^9I{@y2ps8qk9p4XJM6YLpXuFK$%SROjL~B6T)kCJd43|?q0P>7+m}>0J z_-QNWCL(U1Ku2scEE3i=%l)3hR!qChb~>koWDN9=1xNZcJjye_gR85|o!-RYcN}pv zjM$kAXH}#)Z`=tcV0Sc?k`*@Fo1OeT3rr|f!F=g1C8i{P_#qHB4!xSo zxDVBqFR0~hV>czg)sTaOMSX=2KZK%BjA>Ly_*a$jp&z~{k53AFBbP7N4CX7y+9Ht&3mQ&i1jz_pw$?_lh@6_X*UH}Vc{qej^LhkD3x zCJI7_{c4l&sSEtwd#>4u+VH#LHkLDt6hRd%xC1t;{FNVQ_83c!NTmAtLRXL6CR~$Q zv!43TmI1S`gW(z-kOUJ2Z)vw<(ATx9%wfP@qz7YJEL+Z+s<)_!{c24U(dXkc#Ky$I zrJ$76R4Y)0mODel8$aeppADcIAC7=No{MWQ*dR1MajMDTJ#cA;sY%i}gemV-F$nhn z{|{(pcB}*x`X9%{oQcM4^bKG6BR`{QT#B(9 z2(hta5$Q9St{1MYLG$_15Qar$@Y`izZAT8CNLPd>CfxE=i?{5>a|Iki;w0Wti%|rd z3EwNeGS$EvQn$hWjO5PPZ5`_)O-`YqXFTG~E>kdp{=JTL9dz#*ofdz8eh2jG?x>vk z9Y#&f*G|?shj1MFJfeoQ*BZo*VGsf6K<$6{P3YTp1M_C3NVaQ6b!37^TQDi75io`R zG+F4JHsNN|HMM7h=W%N>k#CIM#Hon}qC2yunnCKD;?mR z-x0ulTz(`_22YV+GXT|WhH36$A|t{1Ju=aam#K<%C>lF?txYJ%@_g9@510M!&WdU` zD-+MxRXMzikhsl_-mOHoIuxitYNv;nPEUB!} zc(r2>gP6cf96}IiO|3C{MT|$NFS>p!gnl4<9V!=$-@Al*q5+CiQOSp zhY1)jh>x9-4Aj#I=PpQDh*=Sy$CGP8Dg&!3v0-s`P2av|mT|Y)IJai_&B9SMWFIt{ zH)zbnNq}|mv>1jLT+@`94Y{#RLU;5GRc9CToaR|9!)wY^h*cvNo!BM1z0!&R$Cb+l z5gXlL9d^*KhQzcYHj;+PD1w+vOFO*H&&x)}WOp|xg@N%|1!rq>C5fZx8cusRs}!}m zZN!xz^=xa5^gJe}4ioU3 zpRnhPB@kgIgujfh=EWSqVSqOaHw_XD0ptvEGT}Z%*v%EH5SzO-+dScT8m84O04&J zK{2R<>w-==&}N6dAL$f%-io+01 zF-n?#tQCG~rBw0l?>DDt9v{2W-=ZL7_I!mD`HJL6X?qx0v8u^RWI2H(^+Az?s)4M% z%s+}V>PVq8%~P*9iqY87B%?b2t%{RnLm4?c%t{YN1-`c}A(%M`^M$ zkt>4fqCj_-`K9hk;RnAvQH=(b)PqIl)C6W{lGR>_t7rpx3x3I?WAm8gQzDxns!q2X zHpbF$CUTn~d!_zP!yiF;=-20%Dc4FoL#CHOJ;+A&+PoU$Lo$zG_sLp#?35921A;}p zqY~mkLd(pLJav)Q!8{Y5e%UO*Vvc1p04k6{YP^nS_?X%nG!$e9&Gt&)U|W-1Jr5F- zYecYC8r=+GAnBjmShJ*&22}uDJ$7@DhCvnNgTlh0aVWQ5F8ejlmu8^kOO^9IRDZx` z_iG}UPaG6l&2!kW49!U;MU_@#Vc{%ugIbVus!}=v;ep$YI&t<6W~!?MT)&BnKL+od zwfcqp@?EVCckpSD1RHNa>eLa6UU#E4=-~yFEMaACWXp_(>*_!*bb1gvQDuXz5{mB<5-IMnPVMPsGD+re)h`??K4q^Jf9qIJsN+AHFw zYLcd26~1hmS4xBaZA9x<^CG_e!|pkiyVxr!AvLBm>oXSVC^Mm1`G93XHA==wCZ_1sn3FXG#vM}1?y~b|4rgU0ZMN7lVEMiaE!WxqGSTPe4v_{;&2xiX_!Sz1doSK$~Z4fj8 z|6PVQa|orR%Mgq(4V;~+3f2w 
zID|1XO9sMXgG?uq9ze+(Qo5N8#nO~0Ed!Yg3T4|4X9TnVYR3?3*?p=Bw%dI=#P24n zD%)l=c(8mJ^5LUzz-1RthtPfg%c-# z;8ts$F%RmABXTtgjosV|WEx7`4o+LL3}OZ}MR7`uOQ>c78gRYk^Sl|r zsO`*ao24bM6`$>a+3cZ?6qmk%7zIqrrrt+xaFsz%vb)yzz%it=qGp6B&HU~tm`0?= zctpbKp@H}+7KgcQqU(#Z!>WVF zkS=?gv26q1DCHSt4w~NbQ8g7i`m3{>J)*@>*Ku%)G?TvQcsmJO*#FYW=&Ym0k~iu~ zp18dDf+1`q6C@_^uVmtBE$nga#9{QU;y|pN7Tfv4!jYV*SYErWWU)GxJL0P;t~;94 zOTZ!_9wRXe2A%va4cU$IY`s1Z5t*4?h&;%))X2P!`$3uu->n9S95$S#T^Nr;L*TB5 zhsYDv_|(N5r&{JXQ@r3dewT3{7QEmT+c0-~Ln+HlnVFo_`e3d`ku&!%xJR9|N*2vdb*=2{BS#=&j3Nde4eJ$kQofgC#7KPV5)ntjnd9Iu>SFI203k3`jMu zZidO(SCh1^qJe~<+@3cvN+)W4)mkqS71FfOm;lv5*10n8{fE=|&{@KH-a$~U>((wb z@0I%k>yZBfU$+j%n$1?bXzKBDW|H597!8p7}&{ALDsngJis_c8)l zqG^)R52j^f)pU1|Jh(?pwk5OG*1iB5voRFA#xd24#PJ!C2|UhH!zD&LDCZ#dG4Xfe zQ^1r|RC+ona$2^VrLTJY*&YDO6Pc{?@T}ZVf+A7J@=wu4ehP%FUmX;twZ%9;T~l^0AZtgk3};P7|78j+k%kM!k$bh^i}v)^qtk9 z%XipVvut--HeXJ)V*fK=&#!AQsJl?GgQPkrut0uBHHXzVlAFV_44KVq$eLK@=7TlO zn*$U1qnTLaO*iHyhD-GSC^87vUw}L!r7Kw11=8s zG2&7!6LTZ@n~wtVduZcn)h46tA`F8av$?yR7JJ4HarFWCB$Y!qk8*lc~I zeQ`VBl<5d3R6BS51sIN)y;6bflqTbFJ_z{xC0Bd;ee&92F#ikRqjl0HNddcwzt9?} zTVsQT(7(G?wi}+_O(0;;ymG0nZTf)JA?pRD-xjR<9O8Y{xi9lK*O`2YM_!PDr*sYL*i3O$O5&a^r0l(ifs!QRf(~ z!A`lDdivEX*!eKTLp>1u(Lay4ByuOy?-I8Qi*vuVYVoLBds!(*p3b9z^3}j%MgRi1 z!(ydUq_od$AM*~XFhN6lpx6>%$NrR&(&c8Jh2Mw_(RUqdR9Uj>Iq?wF|UO<(_|zYF*89BMU$O#6ARGFqgX+{G@%k zEwy0|HHut@8Dur<*W!Z}TT-`mPJ}!~i!@^IMgHBSV#TqfHA?B}~7adxF4(>t5l;jm}omb0G1E1Zl8YHWJ8wa%T`*gj}XmNST@#p=QZOwtr^f^SPR2 zMXS5~qMM0)W!W~$BJ+q&H7UKA$B0G_nY`aLkrs8h#vpX;`EHUubH6&{dH3?+3G+WP zmP{;oO(%iKS7X^8Xe+r{DuxG+{gEfVp^d}v`}mMsp81IRpKCznV;G>$aqcDSqs$bu zUz!lyhvwuR1v{wuNwDUdai6)x#j{)T9nS|kpZ5K666npVhVz4Bj2EV0PbU7%UoICE zc*wW*v_#QpM!FSrY*%EjlF#m>NPdOVyt+aG=RH@MYG!+oKc!WR>%zzLUN`E{Xb{bgAGjAL@~fxKheTo6 zfUlVr9HKtZV>ur$I{k7DwJdb+8JC>;!?XF%W_f8;-D%!Vz?i4{hS_SICnFg-$=F7A z&1*{%F|+Q@(>dZ`Jfvj~XD<-xi`&EKf@opCadvMoWKG70F4n8D%E{IG!Awk9txd#K z^1)sB^mZ2Zl|_R^jJ_SDb(>;lL%tU={{^wrHwDv2G|o!`iLFVejiTT{e6Me~2PqfK zWp$DMs8hd52Vw?1^&}7^_t8<3HA|VtWLSGMpkMOpOrgWr`E_HJADjdx=2a)sZCqR@ zE|vF344RuvMoUNCGmM5WK3MD8%;@CtQDOj9ep2x|V%cOJZbGg<^0)+{-mH;ZE0w!B zskc8P8e-95evk?XDH;IB))hM6V>kT`-VqLBW~@>XZ6{OqZ>7S9x<>0P!3NXhfc9mj zOiT^_|0UFexFmDVE$wi|K5=WwSY#z+q;z{nL#(Q{W4_Sjwv!hzfBCwye8_Kwu~eJ{ zyAJ?g#6I(Nh4UPw-M{4~Op&7+JYEI5jD64nD7vD)>w{CxCFZoY+G}_jJDRp^97W&@ z`emX@N_iw6taBYK$aG0e`or<}?+Qzmj0|!88qi@BmefD`YGT2ofasLMs*IFvB!=Fd zyf!vHdN}mWm}h-$koIM56VTx2Rg8Ho{I4+1~Wl}>7WWlZ8BE#*Sp*3zdzMzPCm)D9L-kLLUN<=7>v z$l+Vxbk{drFm9fl&+jsYixVRXSu%5~4O0Esj4jxn*v=nK%8+?sU>#4cz~e0EMmJ+Z z_%m(Pcm}Jc5pLA4Mn%ss&0|F03HgOBgH~+fa2d>BN=`ah6|#{u?b%~##3e=6?>s&@ zJ4hSNAapt0^Ekfp{1`O#;)T-)&+8zMs#em|PEW2jaibkKFNv}{b~tHyFo|d!UW10T z&JIG(PVsCA{DV~j=ecs~X+5tzKh&%Rt=uSiN4VR;j!EE?_d_f+co_xvAz#k*5`LUG zqyYcSOyc6(cAWkymW%?sqg#rVN&qSA#df+LE0xU zWB=}})hRs#>Q97nsuVRFKCx-1iUs*y+se_Bj58W)qI9*L{)8$ZReRM8rmzp^pso%= zYxOyQ&nQYNFYL(qW3oB4N?wUqpE`C zJA+-+;PJwNRG6~7Ff_Icj9_D~5T{ay;sJkG*`rO-pyfPr$h?dq=I=P~b{dixS*Q^8 zWYxg@A25eCn2as)6Wdwe%4Nlp<`i8a0tt*}vgv=uHm%Zl@(!wu+N?2>KX8lnW}^#? 
z5B=8{9$u7HJJG61y<^DLoX3g7av5PfWp64|4ch>~9={O45N#<*Q=Q1Ib{dqMALp)5 z;dfwTFsrSo)Z#>3X;QV)a`%alxC8zvC!57`W2)%kcJ_paz~d&yL+}@iurrp17Dr`j z>gN^vn7N-XCKFZsj%75fa+0JI5%tFIDy7+Pj%6{OncLE{t}y@FNSuI(X?7B=8bivl z3OWIOv5{j(A(*Gdk@*bIF6KOY%4ZOHQpYMmm149}**{+zyW=wsgE}ffaS;c`AX}$C z>Yc{p(^b6VMm4sjX=9MB8gJ<^$!gF;IY?7<^5HBYn-LqFgXjHj%y|`%tGC5z$BMd1 zJh_^LW(op;(R$(%Gh~8|TQsy`zC`9kWA;jQ;6(F4MeK>2t7}c~c-kAdt_teD4o(9_ zk02{53e%&|fU!-sjAlY|iEJv2#%8mY8z7-70qm|kKE2riVwC^5Rt5wa2z#F=E2ncsY;^86p9bjPQi$NLFl-J|Ju(6ohTn&R zzCm09NJHz1USp$Vfw#)f>dchP#a}tQq-;p6W4{0XL0-AXa2S3Z_-q} zAPaE1VIOt0UN{R$b8R{Gt2W~_pN7+t+aQ9Rod|>n zPG(#9ZyP4t#O-1Vvhe~vZ2qTeW2`_!f?Qp~kNXC_{s528=4a~ZJXcb3aSk%K{f<<2 z>de)K1=LS;3pKl4JA`f0;zU}@&S#sZ_HHhYNl_v*G23xZGO<0{a0g#`wuAS<4G8&$o|yp*rle(s1X9_ z!5rW-#N;`@m$TlX6+(iwjGZ8ZXD++BM-1w4)fiiET9(W!b2Wi~+y287X-m?|y={$Q z>rw0+G;IEO11mg<)ET5IG@FlM@vfQLR4&O28sRel3g~g@AM=&zSm*6)mRiKSB4#30 z>6q=sIdrInZ!iq~LAx2Y##}+gLFS{XIsIupLscQ0sJk1?rW=5JQLQl;f!c%Hlr@+* zMpTL4b{lMcTW0~|Tm@}yGxBg-|F=)WLkRIGeCBJVl+{2mdN|zW=WB;DifrOYb4+iY z&*(o>QD5{63HvEcrf1lAvIRqbw&5?Vj)0%>Y#C-V6;D_z3gB^Aw+geB;3?50 z8kWASbCpGi;o42&Vjrr+!2`%hMoLGfnmTS!uf*Yx&&`S4I6DWQktx}DQ>NBL;-Qv8 z5Wj5-TPn$#DvUg8Nwa(Y07?mzje-WClweQW@x*yI4VZJOg-;3l ztirO4O$E`R5=r?)05{WsRT_Gjhoc=6t@7csSvldDPlIq9BtIG4 zEg9IeJ_=wHB~hj?$)%>bJCmff#wMc)P~t1B4Br6--h5vhmp{kG(((m?m~h)9Jb!~H zWb_^PKw9|Q{ z9YaLgtDBs1e^SfXV&WZ;RsRz$H6g*4JtZqM$aurRp`zkGB+*O1W(ZrdhFr`7R3hQL zQG%{pCTy-&6TXju6(22Q8VAxS|5D_WhEv$O^B4*q>X0Tgrir#QJdfkGyNC3!NM>5R zi-Fvp27-$`$c!yQ%m>O6K*FQ(=XbMsA=~jR!8vhVz+X83@))ve&d- z6pX0UM&A0Z5J@`%<$`gJ>^!&G43Z+pQL;FL52a!mCKH!1mKNvRv_{q8cSwe8FK|pq zHpzfXuh&iL!2g@MLVR=ciCmV9J~$>-173(w807%ep?JqS?#(fbjAx$L*PMIzFsUQ3 zz=(T}A1ryT;-^l=(#piUXCuvI|p zEOdQ!VdB^X#z4V8n4GMdfB$31EKWh|E*bE-Kjhjswle;GX=u#518vtZ5kjToF8?4a zY~>~s%chy0fwWIeX!D(CkZr46$A-)<+T}3c zN?tfw%pd1IPCAdzG?y6X>0|INUGFhY>gr|zxKV}Rh;Uw}27y1Rj$1%dN_w~`#lsU% zsoCwCSNyf&vJvGJYw-?iLA;;@0S%}*rKlWU180Xg2R$pIU@Ct;P$*+jTZ)B4y3~n% z){_P0jp~Fr5lpZF98_UKj32AX3{Z0wudO=Z^~u?qTML}7YPagfXUJ?@DkpLrX0VFn zLCpXr`Qh3Rw;l8c%b*S4r(nFMPd1eWz3rnJRXd}s9IdPfVQ(@_1%Ee|H}WJIluXO? 
zP1sxOHkkk6p^i)C{)5aksLW|ewV4^4r?;9He`}ia6lPC8i^0GdT1iwW32JDP;-i`^NhdNYjCWfAut_RWxLtU#eIxT46 z9IS8p0A&B-tQ9#b!41niMnr50oF93@LIooM9B0gRxiLNTxZ;BS~7h^@IB`D^4+)SJ&6 zv@&_Y=DA;273Y)13~wA6sW6?|b?tWBC{AZqA=aRj_ny8 zb5PxM8o995MThCe@ga{IW;vI<@SoYG!C=v*a#!23bg8D|SC#4drv|c$v&uc^@lhRi zSw2V-7Xd({@0{mH#0=ZW!z+7UOKcHyyTJog+G^WHb495y0_;grz?2U4LKu}L)(2dh zX6{B6N2bA$JPUQWSV9qE^*^cxYsx@^i)ehfVV+1pzT*_^Bv|e<8Vk*MU3a0snq|0K zT3!-K@wn`zTNj`as+CEW+GViBLjr*&a`0)pXvajpn}EMAPMoe@Gks3OFq|>XE1oK+ zPGBltm2ah^3k8&U#Sg_h^|u)BX;^A=j}a2AxbBQ#m?MG!KVDnPhx(|lwP;l5GPqrn z``P6L7rgaEXXj_3dfBp{%FYjbhIl4pw_Lwzh!3)7MjMijktlYjC*dRz;W4a9=aIee z;}44Y5mbfa?kv0keQrY0a-LB-Ka9yS%u2m(b41DDu6HGoc}V3_Q-gIO|84+=XFNV+ zMJGc}QPSyJmNQUnZ5I_%lhK)GPYInk1%ut<-BdsfO<)ijj9JVg^SFervd#rv{_UQv z3VmyqxQ}d9-Sxyc%p7mS#+c(&Lxiwb1?do38>+BP-%#BeIt`H2ECw{wuyKhQS(J_Vzi#u1%LGee zb*j#0Kzh{kq`_0~GQkE0a&bN$3@^`M4W7;K(7s^7L^CmkrAzl<;1FBfvRVcT?Aw-4 zZYuqm2bh(&1Z)Yk-G>@hl_b0AG-M z$*VJzVb+obb(zF){v=TZoJ_}>$hwQDP7LCSheNJ8Cm9Cr+!D_iewhypsjOF z4^TGT%<9K2X8^qx=IB)fs>XZWU@OHKX6zqIfx=g&et|bORol>hTTr!W#Frq{=C^qS>`~uw$FC&Ed*{cq$h}pv>qsP;I!CF~H`_K*p+^#qQwod1tt4 zao`v#gn*?_=up~t8K{`rAz8I1_}kl_c1+7PDTAu;&;^jowOKyoad)Q*zbrLVE^Ci6 zS+UjIPc;)Rj3Tu~*75 z01|STkrC!2P4Fg$AgG1wU$F*v*4tkvi~-`6coQq|U$}{lW7dp#k+(B)}9FX01pZQuRjG}B z!Q8e;>Gr2K9z3y{P1v8BB3<%f47!GagQ~!}x3}bY#WArtg9pwI=k;U(Y0wfbYwH)jL5Jz|VJ?u@Z@hKHQS zvz!X)4KbE(-_^k(d>)`=s1c(t zXsB`AHGrKevJ=oB2!Gl@zSId1zZqu~qgK2i%MO|^lkuS%&-0HVhc zPP~iVm6D8Y{y)_W9L8k#>10J>h|*-8h`)XaVVr}hl0FWmDC&xKVp?Z-hQ{N&m*C&dJkx*}AH;4##{?~B{J-1Hek||Di}^Cm=r>3BhV+!v(0}}LZKD(*vP&kek#92W ziBk^Uhq&E%84v50`_+W7QN{)KG@zdP6)~8feiDUTj(gpvu!z7ih#9c5m1aYQxz}#CYrZD^iwe(8E}Lv3~b>qK?`)onuoOR zhG;WRQEULy);Z6v<`5b(5|)}o(;$yBQIzf!#a2OREx3nG*TDFVEpbDYzatlCr0sp+ zqGTIDE3RT4n7awHXTAo%wsTNAFN_$U6Xbj$!1l1~+Oc zZbzC8B~69|W{|li_d`={{kxkn-@JVNt)_Yu^INDbH3ORnFUT-ZmWoG{!VAG4r1U2u zYYa~1Cew>)JR)2*Cz^_NceU(p#eW3?1Et_<)cP)zT)06E4YvQbV|bROnppqc^wtYi z(b8`k$*YBrOES@oi?atlaR+eqt5Z&+|1%^YJ>y0#hK%IMzcQ^74ho1jewMYf-1?M; z*M#zH3{I4k#1oVg7(pt++gBI_)pv2)Mm8CAYNBOa1bNmDC!438&e zJ`LDiiMo9t0-+;|o6*i6P#%TR)^-B2_k@=FcP#pxa&2ehn$VE+F0-Aq4dj$v^csm`XLV0p#GAi=pwpjp_96FvkKY#^B-aaq(d=1Wa z90llotisX+#-+o%A*HX#?F;1crf~-g$>?n8uP%Zn=|br)LBLckf*P{D5O=_sHMLvyT&CsbqL4dW1aWuago$Ii zqO)miDl2A=UF;os5mj=&#t>uQ6=9f28;Uw(q8TuJi8c6NxwZU zhAWJv;f|q)8viKG2r%wAfF8bBHcMoX@dQxC$ApY!_9OQZl7^(#DNS_rUwT1op=scx z=K45QN=rD~c|cxu7oapRsE$f9=tm>#5wI6_TLd*p5$xB%)r+#ok;2TJ9ddnWfSBRl zIgjyPBMOHDx%uUqo2c}Plf^3Ao%{@)i3E2R*7j+V=8Y$THiFAL)-``;&;kl^tvNZ( z>T@%-p}WS4YTkXdV`3WDwd1Ng)!Q0IO3w#Q65*5rcXsM3PPz^8V<4x(y+y)hSu^Y( z&lgl-W!d!5vLiM2&|!p)TpxnHnnO*&Pk3}7Ndq&OII-6Ps$dtfcq^sRyX$iC<)L1g z*RDr=PyY`nH)}qeRjS33KqE&qurUgv5v6NxE5*1OAx|^lC{I=aGjmbwpFm*b(?H#4 z){Gg{EkWfSn@Ya0WmCAmpLQi=Sk6sajWVAy*8R+#I?k@n?9#*(?MUxG`MRlrRP__Z zR%hX;a>oGiz{OQO!eUkl9aeEM1+5jZBNIK{+<~d2Hc=f(+!vkEJf!T17Td1TGlx=vZXpcC5zUv036Hh7zR6> zyx|t_z+llDDKv7QFMwr=qnlxXa=Wase54U#{F*VTAe%X~@q#=4RLMW}_7Q;S2mHU; z@M;-A1@4n*DWc!B+R<=C#y(@DjloZOj&F5Wx)sP8C?D!i`Z_h{u>dp(V39vix}^)7 z&JHiZML4`RpRw&7AE#LTz_DqDsehUN2D8w@o_ORQ@f<)af5sj4t)5)qv&no?QeYqkN_&5IcSdC=flpWHd#aaTWCHasQmG0Kv*C3m46-|R ztV+yqjCD^6ut1NmXL86GOEXNnww%0!8N;dX%c@laGZPcB0tr7Tv8p_X!AZ?#aRgnJ z^L$|Nu8`B*BQRvD&r!>iPSB?3#735M%g=f158$&9bSASRlBtSm-KPQh z#VF>&XJx}Tv<(Rw;F_utdE5=srSTFnbNbt60mv2}Uy8s>bHA7pObmdINlyoMiEAx6 z*s}GdV~F}6i^r#@=gU$6Q1jW%;+%&l{0vah2CQ6kK z5Z%h^3KBBDiZ9#E>>^jK)TWrItjswhTiK>CrzRu!I!coRKd<$mx!4jdR2C{AweJ*Q z&vQ{B4GO<;{}y}6qOhH7X zO)T25c9G+Kw|*4Ws4lw;%qXEc#Nj5 z=FYG>BZ?7aTq6B=dMiFAYhn4I56NYa8=27I<`P2`zBT8$u*ayFQ9d95!ChVw6`)Mx 
zNxCGG1xFFM`Tg2itf<`jMAL^I+vlZG)k#vt(ZI!3)mH-g+~`F(=ZL_5mE!u{bfZXkNk*?+Gx7yCGSLp`#E*gbNE{i|ec~j_ zlk;{XYz&-%ws1=fneJ)=%tHYpp0{CbCm+clX2-CHwViUae1)@P_>!z4Uoorh_n2U; z2tk)=(cyMujZy6kRD|QnH%eMXa5&_`S%`lsD%^Mjy7I4GWFd6-i z6yqH!rThI>=^drc279q7h)7a~RdL~EXdrmgc4`nd$~3fD!a5`iw?@?jy%C3(hH|r$ za&b(T|0x#a`O71_o7YxnwdMNCf6h* zvIv%Cdry0SI$Z4Gh+EhIiiRCg#CJfKGZ z9R}s{vv;PogPM|C%f0`VcXXLiI6k4jH=f;n`juSE2JCExIWIluhd8pBm>S8XPP+s;hw`>+ssQ z8V-SAd!1&%_0K~sg|I360mt3v=x5_~2H)z3LC>99&ONH;TDi4M2}J~vs4-@?Hl*A7l=HP;)py)AN-=ov_2NglrJ4fGkF?X)QIG3ijY?8Gi zbm&TW&0QXaX}!gD;pXO{CkTN6#s`{XgBp?OjjhqJ**QMg!o|!du4T4Q^)BX8SKeCX zp|&Ako-1V~#JkH@R0G$iW^p>|#6`HZ{Q+-$#LWH89h69y;jFj=48=wq)EQFFGis>n zYk-zz$khlxpjj=BqNYv!dxm!omE!B~p;$2*H6cJ(KldxSs@sgf#K1uVx$HhOm%&g8 zu%fL65t?B$<07gFcn8- z)K?t7Wqw}peIiC2P_8sk183XGUt=hq5FP$+rXSK>Dnzrp@284f;JJst{J zwMiM}{suF>AbLDw0Dh_o%*M2tpD{siC?v~>Y(e0Ujc+gv)RUMMD`IeU4|4n~OL&Bf zj1{b_t<2h%%?__=4hN}v5tkp2!@}tF)<{&U^aS|90LVr|WxmY(pmkq;dRh0M(14+F znWtAXWXjxNHn}c9fyxi>JAMVH+Rejjv~JMT3d~;wvu=UqB)gN`5JGHL4chZ1Gx|5q ziw}EB*q#+(q5M1I7S~J&QJqf@$UOa>j!p)viq21Q~Dx*fom0a$$*_)D=|j^8{Ljkq(-XA)qsME zE_C*HY8j8Nwfn1qdFO~^geiz>URyS#+_+IFv?n-IR)oHnqv6NAXM{YdS z+ro1wH&^(?W<{rkzrnO0h-RWnB}avR;y0lJFQ+ra#VW8f8KiyPtxL*CrPZkgs7Ig- zcTnZe`bR@Sy_&IXSU+P1xhh*jn%RtNKsg<%q!bzgw_yG z?vvdU$rOBeaKx)p`sx^LfXT&JuRQXWqF9U(lFJNkUV+ zur>c-jl(peh%Y<0&61^ogO&}+=*DBDy+L}yLF~ukK{UW{vnlMep(h>uyd=OgGgGRZ z!(oAvr!knm%*wD)Fjd8g3F$>;y8#`064OO-prIB?T=26WQ{R#q6D$BVcT3v0X(DMv z(#GYFWsv4YM^JRtZsWr@#P?fG6;j)zD7zYHk043i%G4U|=ZNT7)nu@B(tg1bs2MayhSmX93oX?}NDJ{)-{(B{CNZ$BT5 z?}u6gdDO_RWx{Xf2al>RgWW2Fbr4%&G9=hCVaFW zYA;69^+li`nAwCN##AjU2fa@J)`1Z~Pa4V@GU`Lisl4)-@}2gXM*fH_aSe?)t;03I zaOP1as@0IP?h`S?rkF_$K>f#U#pR5Uw;*svo5m4ZXk~iB_?YMk!J;$C+^@Vd3)NZ! zN@-x3s@kUc^2RCY-6g$sX?$?}VJi_QPPwLhr?n+g5M7zRjTKf6Lb)=7R0+AbZ5KKK z4FjETWQ9~jBqj|KTYQ{Jxypps*H^>|6n;_T_F@UIP3k>^Bik~-z^P72?z~g6wFHhM z5Z>ryo<$v5Y9{VWJlMf8J_(2Fh(q@~`uabWvyG8upkvgIE%*s8jiH9}=hwKl%p>qXqyJH2&^|%Kzvb#%%-&gD+1+OEwIGT>uIAdT#Py#El^EzLr3wdZg0$f`f9J(&xb$|Q2dCY z_%yKlY392Gs#IZAU+oWxhxV~*D0m3Uf>X9{b;8q0x%*J3MR$&Mkr%UlrMO|d@Y#TNLWW`uo=l0E{BG60spU7hIRGL zXn6j^TREKPLxngxikmIGFnB9;{!=2QaHS2nKy{AHntL8Fb{>*o0St#w>`jT`&O$fZ?>2L zbW>cx1*W|+B+#A^-TDT_icaRWH4d#`QOV&B+OV7gLI`FD zv%Aa(;~D^=RC9nr3hP|>q+(Qsj7u~LZGH?GOGRd|$T=&e`>n{w)Ku8JO5S$9p;gaR z+m%kN^SRBZ;Wo@JB4@sJiL6h~Exx%msg_B|< zS;3e9^IoGgw`+h%Fzt_yZvxCLQ$;{s%|VW`h~k{X;d)9rk=6SD*dH*<4o4yofDo_vl5T@cc*2Egl%VLd0Ho5hw zM?;g9cl2rIxg9QNd+B|p1(5;0&ZuMf&D=$G&Uf1YfIW@MgV>w#mbT0@ng0N{qK;%` ztoiliZ++6339b~61LSWFceoy5!)_I(IlBZ6h?QJUL?i8Y-A%# zVH)`Y3Ysc!)sSQJLzl5q6e8jwZNP%ksXgN*WYhG#mWK7owhxBMlAaitccaEFqx==( zR1r+hZ;HozN8Cb^2JWDWS6C&O{aC-w#i{MMn;@mIg~5rc@N~mB5wd)m%+!Vl?o7kb z*VchnUE@|K^za2m5vrM~4UAun!O+Jc*lrOm+c+TgtNQGqsF78xS(+mPwh~*weoOnv zKZb0$*hLAQpB(m&#>f>#n6z&(>>!y15dg|%-9JF%4l=j3OCr^ooSmr!U*6$;6fIKu0}V1oYO(^^KEixwB9~Za$J7cZu}wmyG0R;h62p0cfjS#Di4KzBNaShTeK! zdzrz>&e&&8^H){cM{&TQu6ltML_A8=MQeVBN*Q)elx3yKTAV2qy9ve)r z8=*s7Peiw^P6SQ0yR7fhH`vCrjK~s|o;V2mcL4di$dxX#xJYJEV>8E?kK8G=QT;tP zp*%{{fJoB7p>9_?MFe1+q6;)P9;Jl9h@{21WA8wcEQ0eH_oI>(LI*~a_N<11>qLFA zT?vPWV8a|%Bf7#}WioL%pPyXDkM=@`j&C7OkGOVa?i@ksA-=klKg?F*2c?w}nH6i~ z5{EetqoJyu!Ly4iF#q&v7=|R4%EIof{i#@-hX4|>k$AoLtE2cdMdqC4)<<0fAO=I~ zjHT6SzvEgIZuVoEWzQER$75ctm~Y=PxEYPyD-k3%juT2+JaES*6gY7t{y<>D`5~jo zhEKz7IDn~zg%t!V>Mf)BJw&5@veUQR+ol>t7x`@~p{GPJEFw#*Wn5Xe(*Mk5AUG7R z3naWsQHhWTrLgr}Te5)N667ZqGhjtSB+{m!q(d&ekd&}*c! zmFv7c_sTyEth+Qfh6`qTISxWYsCSTHZ&V*Lcdm0`heyV6<`Q!rm>k$ZE3|%%4>vXQV=Ib2sL)2{lLZmZzVLe_f%NZ<2q&3~#Pr1744?w2K@@IdQcy=EehvhlB z6Gk0N6lwRzubRX@q8NWqb4YIky|rKf^rnRDj5?IO3! 
z?z7RzeK+$D&7RnhU~}_BljAe&P=wz$F`kveoS0QCTAI(h00kXkl=*fXvet<%*QW11Fgkt(07x?X@65uIBctyf1OucY}hzBIBB)c$UJhZU?F< zS`b~;-SOL`PMiMY15w}MBC79bb|t{KWIi%~3-}*Zer_O?^d`FOoGcT@R-$TuM!pBc zScq1RiCQ6MX)XmD9R5_KA_z&_{pVXDJ+&x&&SNmvvGS5=c$vAIh?Zf*9|%-0FwOHr zytWR32AF0nK!WXaK%qRo1Jf!=ztEMYL+zPOY ze)fulQT%d|bq!NuPO(ScT>ZPYr1?ky+;x*X4;eq6-;JtQ3!f(bVp{e@l8AuDGWr|m z?s855@zty*%|7&o=g0kqJ!z>KrMXsH=Nq_?7!HPr<^_|H zC@PtTbb%+cG~mFI+5ye1W$+YiK1PehY0^5dN{R;=yhTqf(e%Nui11w4IC}vcQAA{G zP^Oxh53U+nxM^!IFRbS%`!!=S^|Vtw!$vK(*V+fQjEotE?%*_kPAE6GAU3L%Ffi*?B|1&D;`Hq&fF$B#%|0Ox$D zm+{*RTAbVPagT?jS{PL@eJV!aFYMGRRMQ10*L~v92v>x1a<67~NzT-{Gf0A&o23Da zs+unx6Vt|S<~GF^0Ha8#!D2+l!}+lg0xHeGr84qm)NR;T%G(W=ceC!2&?$UIxr`{q z@oe79=rWoZ(FjkF^~g!^fDLEdRyuBRAY%M1vLUrsCZvbl2N(b&n~=}}+H}cqPmh4((MR>?PBDX#%D>Ai=M?K5Y@o&=q=;6&I4Rc{&7_?=@exF!N!AOeCpIjt!3O% z4R#D(h&7B!#9`H*pK97C4L`WPb&53+3y^=TYqKXiBN?FuPM%S5bvDlLm7r)ap`hPw zw!y15Q@tLc^+)-J8frjAxTK#;ZE9~+$cbW65sSMu96Fba8ieloA;p>U{7%#t<+O{I{mu&j}P zO4W;LCa(Y4kf(f?m`g9~Q*P6rdX&wKCGrPK^$4IPVxWdkL+7m3d^H^U!JZiV>H5>B zVcqC+@0TQKr^d*_DR0zX=Fbx%#gDl6u%w2U7@NAjtDhVnP)bEiAu?IsVmbnGIy$R$2402{i(x{ znLB5V%yEt;cq=^p$}(sTbDq_5_o+{UkPqBvZbAkpFQm&ybn~Yklp!~nEFqmWEi0NM z|C9!2Er7gd;GxhEmbQaQI_WAxri4Nl?XZAq9#9FP@c{^vhyQzKIXP3 zK8n(K+XH8OYBkouT+s#`qQ>Eoi{-gNh5|g1H!4OK74AquC+-$pYMa6an(`kA` zI)a0a^GUkCEnPwaFPvgmBuj*Yv+#ljV%YQ<29m!3vYuhQ@ODf9(e6UC{Z+$9NA(h+GojigC#mj(SHz(#r7JMSg*Jr)Y47c(*d{*r7T~4bMSnGe44S!p7}AU)T!5*}`WYsWQAr zMlu$dM=X>R=<)AEVg#Y0{>jh^QI+}dUKQ4dptyF9a`ic0g6wA*PGD6qjT5gN$h0yQOodY$GKonGqRyjZQf#AKM190+Iz zWTh6A!#he3dAsmAOJEiE=Q*tC1bjTb;S2&I15_n zn?$4GwimtBeT@;nSq8T_lia$NSL==f76V7}#Ihlic1){g;D%Zt8@(ncfjeLmscyRk zirJ|?I5Iw)F?)fgUSX+S?F?3}fe5slPE;826Izev2aM#GpRwjO18c~tf+TGp6JR!@ zaBM3nN1BcHOW&$M1e%NkCn(@`cn3oQ1?E}gJ~o0;Yqo@zrtWBlG1L?f;W4-nmt=}t z3m*=G*?gcX`r&gL{(9F`TSPhKWB$k|8Y{~Pw7zv z#MEKdh32oX`4$i*rQdH(FQ1&6pW3w=d;_(!_9xQ4PQEsPJb?5sKytR2TFd%byJ%$?KBW=6pF`S zCeHkX{fv_J2}t6R8`+HCRKAVA9$|bj)>M1Np+c~QKS;L{-BMgq?3+^Hxy>u3!?}Az zN_K_`ZQ=aHMgQrB<9yPVvn+s0Z^7zqo^wG^KCWB_v7^X`ld~8*IUq+_@j99k;_7yT+4SD*4o(|PM=xq4$6DkyVQ6P3t2mM<9k)0M>>Nz* z>c8auNQJ55RV3zS6D2d2mg=^Msq;_8Hi}iWxj|_n-<O5jVP`uYjR#q5RbM7gZH`hEsYP60_2G!EewW!9(zSDL&K=c&PyaHxnYB_%r#35v#e zLp6bN)|TQOi>d+aXg%!vSRaRSg7PBfId3ytZB)g5-Da{-8?QTt(>|7x3FS)qTG5`H z_0hAabE-;CrR+DgK}3c@mW(s3)quZR9IDeUNdx#?tjcjVK&gmOr_mO+p{~)ev{uOI zpVtkL3wxnmM4tFJAXlGa4$j5xDsV zjYRgmJi8Zg!!mV}+{ZW|o3Sj^2Fl1Xa(Zf(6UOflL31HW2eKywU1e%3^V%8Dh`$57 zL=|Fz`4bKbJPv4o6Ei$8UZ=3diaI84>+B^$Byq5G*gNsbqMOmwGs>n&hs-E%&`qXv z>L>5H1WhEoozURFz7Ti7M)J*%MUElc6c7ye4)KYN8j*iWI%KX@9foNy?D=&M zx6f+>#BoWfm)uvp{WbNgx=$P##cWpqv? zjAVw8>&J=%moz-qlx6O@Wr*}(e9)$Ysh2Z8YT_eNzB zCtT+;)29R&sSwMCd<`Pj#pU)6soK;Xi7EVnK9?Oav6u~A>6AHgD z8k?ki27=8MCC?rl-YZ9Hf5$mlQQq27-t-b>Tyh*uR2#$?KAZ%#rWl{tF<9VL;MV>> z!pNFzHg86+b=fetme)ib1D`O{$?I^pVs>NT+ zS`x(%!Ow}Ue8A2^TfJgr6Z`_vl%Y_T76u+QSjBrQ=+F3YIA0X5L~b8w%(yf*5}LV? z?Kl*1zm>CyKaMwSetr07Dv2)4-~dCMr=e!BYp|e zNovpGgD}*#f}@Hv z=lTcSz)i}TGyXlTFj5iYH)%{KFRARYiyCL5z@U@IohV$g8a2nafKK9L-ZExKeR8k_ zXz+>2;W4m0uYzJr(_U7-TImSu=rjz`cF?eFOWQO6Mf(L%t<|7a?J~Td3xnV7A`9XtkHx-yqYFR%b;!sh`jXSDhXgp zuQp;Yjo-7o^D>NQsQ?F>{qt7oH)# z3u~g#xb0gZq~EDolcSK%HJ9;_J+ZKnMlFvL-$UFw(=!y>>gqW$=iKyn#dV`6V;q)2 z8`q-L#alEd!nPc&7RMi6Fc|ZS!}d1zA3+rL!t5^

-)HvcTsh3!U66xzQK4yD=<+ z16X;~?am5=0=2t!cjD4OLg>xVf5=-kuD5KCPP66+{`fTwe;pPb_CNQ**NvKBkK=bs zapaby^?d4mB=S@^If9LRAx_i>gnAn(u5a(-;!pr1Dp|1!FbM~}eQ!hT?nESxC>^eeRswU45){cq_Si$<4)R>#(aG>G#(s&r;2SSSC;#_QSlkZ~KOq ztVTWlG@bT__NLv7Zj|M7#tO%Iuw5iKL%K;ikeuk%zG7&RC5|M1_C(c2_SsuHIg%{~ zBgQHf8;BA1G3a4&2HkMpS`MlNfZ>E(k(kHDSi4!oa%H+?NrvXM5N~g7OV4H#ZWAB9 z1(wN&ve9gxoI#FfmmUZpQeV>J|HBJ4D}W(7B{c&Rta#y2y$UskQ_py|nk{BQe{IH6 zA#7ABTUPyp`ym6l8Hw*zS5>-J+2f;Nehil30Kvjx>F z^_l#rj1$$wy(sxH@2ey_=y+c3XWw>w?Y1CWN~N50wr#Tys-t6hMFf4t2eE%KEKYQ9 z8;wpBrc;G2GIoyo8k26TGP1tvv8nF)-Fw|}Q<*|&bF`EvjbgiY3)Q?dIb0X6-hhjT z^H0Tj-r^V*9AU-2vO(J9XTxt`kP-*R*6~CgLb5A{3Xy!lk=PQhNkem_JsK9d zJE3fXhmxxqJ@ zdr9+&!lgN8Xp;dnQ$5460GjFD^b8z+ww#j|B?7=m=ROvI4QM7dsb%57`OSCSd;_El zNdj6~TVALRI8M@YGxri*A=RXjrXYDXb;yuFfuR;yxj1tfD5&arbTYY8#}hcI!bi;x zxbDAcqM z$^Sh~V9F-^_LcK#?Rj$Jmm~k>SCccaQ+cx7CcikpW)@^Y=rm!0EM%;U zxH|B}6=p1SSReW;pFw$CP5Xg9QEiIz-TmlqhQy>gl^k+T4uf3eHILQU>}pZfLJ$yJ z41iHKIb@H5StDF*a)kS810YEDGUj8OXOkQ`Ow^bMka)k{=oe+V9fw}{tA3bmXUk%U zxyZJCNp>is(@8~)*Tg^v6aeEl?h=8kbA=-qT`da+Je^Wa7L#qx7oeb1PH~p-Wr4}t z6u*?5QCD&W>tK16^UNxGPApw;GL|?SZCx?cDVOr5miW&jQLymoz+{{&djUTETV^3Ql zNN|A=E9`Nr&FJF>U&x;0rkn=#}C03rp>BUzJbf*^{oUXMc&le2D!Np#wUsc0FABUYTWK87F=}IV$K^WW8!GhPKW|G zzo4E}CRm7#PBS^&ubJ?u)?q0JkNdi^{;pgo%Fd-AU>ziTjVtC6d|lGxiQ20eDfuLOa<>Tfh5Ius-#*` zM&0;z*$oQ7od1>6F!guHf}m5h#Jz_!HHM*33Yyf(+O8LjddjS5smP#y9Gw?5K5u_G z6ck0o%0LtFh%akPM!{e&jh9K9F8~TP*;y0u(W}Uz{HUm-WpTy4Xu~jt9}x#fkL=-_ z%`C&&K+F?+)aVGud64paXEC82XV~`43$cb;J7KaLTpgtC&2thLk0y7uCQ~4ZCuttC z`2c~j*|ykbz*8!Ul+HC54V|#0+Bj6#Wh|t+Qb`_%k7MD7J-^4E)%0+Ea3{Xd*^d0; zT8<)i;0_E;zGA!N2VkeRoMOqO+mLyvUK)?{6j*cdFZfi?%(Xb5(`PvGQ_<^VEqRa| zi`6E0XOMtl`;E@UAF+Q!+kCIkf`6G!&>-S|gv26*D)dzG_O&5fMh8Gcl z4YXO)r1N}Bxiqh}VD!QAW!_Y*2I9fLFY|d=JInHfEFg1tr-^fxv;$sI{c?%|Sg} zmGc>uaF9U7Pc?eOC!EbQ?H?0qL_HW>6znj*!40wZ>}d)1&GNRp!69}fyQW`Mh!^z6 zQAB&6e~3cl+a``-6M5a0==O&r_kE~t*Bsik^d(dPmWU8r8H*z7w%9#98Vs6lJlN#4 zNL9>6KboySqz?(|ZbIFI+VD7dw9Y|JT>PL2-%D0~%3W*Npq4+*hvrfoKv|HL;mRn* z0_0A+hPgsaA<6bMb5<5b3biwkA1C3c2Ecgqo)6A^p?W0baURqdx8k%IL<}fAk9s%% zlq7|if!g9xTMaU%geTd*QlRjdpW_-$up}O$x+`Qs3|ErF7k#8-I>Pl@xB_P)oui0R z(<4PY8g5h<)9HLl>mCrj7-P@DM88O3#i32$*qYq4&v25T_SNKuM<{+Jr3arhY%uS}(15~cRE^WX%u zF9pJ}bl}dmy{)%|luK@IG{SS3HmYunkR7RGzox7eXdjkde+sOh?s=yIi1XkCpxWWR zPMdl43HncQXUqpXal5Lmcn|VY#n(V=G+KVa_0ni*Q}L(b2EZ6H&`eCV;puoyitXje z-K`SsvhGt<=E1$w_n9UgJcNl28NH#H8$o#-@CCindXaUJe^Fd7dW^9cF2XLfgTUp4XQ|J0 zELC@skz1JEbxW3uRTph-6m2kN<6*hR5pI-f8+EktMVAJnwDd^Nj{9IbEfn8Gm3s)< zLM6Il80A7vm%JI~p)Rp-g{?Gt#1uDIQJo(+@b*ePh7bu59hUv6IQ`~=JUYYTICL_J zlNCKtzI6~SIf?W=|G;Xwyk4lldk`NpCKb(Gk+?_>g+%FU1`#dCV z9J%=OPo3Pnt^inhC$1k)+)H*?;D}-&TqHZJMyv!U`lD`pj{7tvAsFQH7;J+|SDhO8 zb+c;_ce}-p*O#@5hIaI!*&k>e*!&8elX0pGmM%uw(Vg!;p|0Bj2rTR*DFcn0e*AmREMZRz2>iVOfd`15ea6^QDE z@R3Tc%=ub4 z(HryHN-pvmV5q0*WUk_)xKNf>px*%iH)*u7*;?`mdQmCs!5dF37+VQ&+`GFn4L=rJ zKG4ai7VL%|&p)LuLnFWe71Gq9>hKs0t9>fC;5i0@CQXqxOGch?q=#jiQ88h@!5VCl zEYD*LtbjSAj;1@P1~x(5W_OGe0G;hi|Ef`H{}Rfn+1@O6!QSIu!^1&UiCffs^a;{< zIqrq%yxKBAE^et~Y0a%z>f14xCJoc_sHZkKC=4g}xL->0N6mJCa_Q96vAiCOj~Nzs z1@Isq-%A$6fy-%)_!U(~r*y^=GlU&ormVRZN-UYS*jKL;0p%$ z9#kokcQq73jY-4fvgOB3<$xSxAi?Yq7dyE2yZo&PYKgmH^382`j(=RG#2Y4evWGJ<$B| zI9X$>6Kc~>l)?)|e4cUC=(7pdMPDMtFBaEDPQ%tSi~oaVX{wdpIA9gNV4+S-W&b`+J>-U`Lg@V4X9!=fym{Pk3C-=crS?8B-crd_b)#?tl0MXzQVAG4F)@ip^YWAI zO-}WE7c^h68Ot!p#e_Q;Ytn%B#-Tx4EHk(?G{QrXWuz5!L-TjbP$;}d>=Dq7D=}(X zIJX-SpoNNux?%P&$X~^mpVTegXZ7d569=aihM>rC84tu4$H_|a`}v2z`1S&*7Hs89 znfZVqFMN-M7mLy|q0+zyOPkMRJ^A-(4lvVV9j_2!Dz#8NrAsjq=9%d~Ok zwj9N>FO*FMqhqKT#z+~)r=DrS5LC(4)Ua6Ri+leIa-j$ZZfcNu^)$og!-0&Sqpf2R 
zGZ&u9m?JdHMFO=oetzhKO%Lw@c-=FWKaZI!lKzr_`7eW~*)_+)rzijr(E}#=(KVpB14I2>5XgEq6k3rW*;vwEcl%b$9AORl zQSH!rDGf(e{F!=;VcHDof=;f2Zc>Dbk-x?QnJ3*lB+mv|G0JUhv_SSo6BX1z8oc>c zM+jMH+R_w8N;7;HmOhPsY~9VA)r|>emkKy z26m5PS1>e$K*>%}>)dH1#lTpN_aO5n7EuwDONv zGORlXlKk4H5{aSr-X>z>%`ai!{mEfd>4g5Oitxw_x8TiCU3qd-qXF>_1cyj1hODbu z5@wuWdZ>X7bI~Ug9S-I z=#a+r^`3LSAX|~ly7;*Ylx7)R?FaxhBEC9$xRo2dFaZ`H)3W50b%i*Fb5qx+7YgM^ z$wgn~@gb@W7j{vcUskUNi>pF-C!eaqvsBbKb-Mux<8-%(V>r8cC1cEiTx7H+bw^F* z&o0i?Hga4Vp_OsQ^FdHc9E#(68+M4DQ@UO*En1u!_wM10v#_`>E67R#Ivrq~y}2F4Q40rPt{=widL z>>3#ckW)%KLfa6qVw6Ref1D#a1zua7c#J?ZoG*H_*QcXTC%AJnl{o^_WcOoqQutzv z!ufJ1yo_t&mN@@y0v3m1u0V+Biqoq>vx1CFi>RE;W}a)J zQrU|E>Op+^FkCP2FVy+{1C+c~ouNZE^xwk?(2k}^je;B}lAd55XC3f30`SenOau-w z5SYbg&U*ldIIsTy!!#cELIcXv5mUq_TCt4D#*l3hncr!DUjBvFT*P0rMWnbPFLmUw zIKJCUmdM%;P3ozbF#EBt!TgoFK!)^`-nwt1b~;fB@vobuF6?1f5NdWsta^&9;cR!r;7CS} z9(N;FZEo`@)f_zPBEzi(G_zj+nzmt{2eshr^?BNsgmz0@_r1113oql0etAB$+|h1h zZPy5E;HhQ_pBlL$p~XE=;uB_td{EX%bZt;TT)$Fy(VDbgM(pg>(uZ!LpIEL@JPMsV zDaK9To>ToI0Uc}jHEDM1we`WkKBwQjE_yQND?TIV$9?xuV623GMR(AeQ&(scJDUn04)G0 zuX)qaiPk;+649g#vrUsD&T4&RTiy0S)hOU3nwhO{^9T;;_^Tg9i{W>kPfN2gbVJW(Q>tSZ}G((t8HiKRUuMftcRXC^Q;%o@XE2(-yL6845J} zb$Dg -uTIk%~0;x{!Luu;mkN?-22_UWj4jg%vsCVq!^RR%Eg5be8B_#y z8sA(T-8={v>!6<~SUIkyNv5~_z*ZV?p`rjZmiMsGKn0VTh~CochTGB>$8cGv*m8(0 z)zrFK$}v~)z#VlK>$^X<%~-Gx2`7aLiFSfM2}8ZnVPH$+(f(i|sj%csJuY5-1%T{N zh}c6=tA_66>^-hel9b)p!79diT6ou~^PN`Bi<;Oj4glfwejfK)# z6|U8ym*(=Zpy6GNK(mmsOY(j2%H%hXg)fTolp(gP+Z9#-Omkr97W0^S)PQI$&~3+S zi5EGRi{@!*$c^u8h6NvlyPdL~Vs24wV(1%wlb{yi$ZS?bvqlSe{ccSzITUQj3jY6U zYhX>>Hc=cleO>vdj@m>vmARbm;)2kuu~<8aH=yq!@_bgJ{iV!&LeK(6i^1U#Sc|Rmz)10+ z;Rd2DK2x_5M3!o&#{EVyak;NnvGh8{xaOVMHmAFSty#;*vY{R!z(<&h{Ji8%XtC;u z*l;LRrP>XKtlT9p+7Uqme$HpK;sw0nv77H4xW%31S@EYC%l~4}j^})QPRagBXgi|9 zjq_eHFU4Ym^BLSy0?lv0mnK4W|3w$OaSg$zExfPssi54{x-p;p8~$a>qJte+;al-t7zlGv`HCu)46N*^tBfs{09L@Za8xj|{0AiQ;rAV6T5@-COq(!euXdRUb~z440IrEAMD+AeZ;8EUs}#Z zkkM+ECmhw1X+M@lRPB6^{+thh8*;sxZ&g5U%-mqBwBYzkwv*)4v?hT{Zz0srAMddw zuB9%qG{F~Uy7=aKx@+pBq6-IzE_wmG^W7dzOqVkgz#iT_PJJu@xKggb4b%*I7`-~Y z5;Hg%7PnSyj($mw-H23mc7h-Q47ZP+QHTc;F5MUmQE9o@JUp^nwxR^B6%p^NgvA;v~G-W}n|Hv4H3votY@HT-_%vAmjLZ1%xU zPFyj+s+yTxsB7qrL^1vo2v`g_5jAn^)n;i0DTs|L+n1Mv=taAUaTRMyOzk!HxJ^b? 
z8^Ft;7p_NkZhP}QE-A}rvtALlC*}xU9I$+GQ#Z^&zcMG~EVWRZ!ddNF*6%8|J2zki zb@ApR#Iu&{iwi=wX4VpP;W|_spPJ2Uip5$#nFEq?9rFo$g8jD6q)5COWEAhQp{n$J zp=iKk$fLaG<#jR!_=2D7@k**?_PU>+M2}-N8&nSUqKsM)#v4-L`T_&&q@SuVtukO3 zT0S3wN}o>TL*BbmAx`70641;mnn!N8?%si_V!nSM4J`&Po{{2bRj-`xcF zIQU7F7hw-4{6v8jhmqKhby$|U8tEZF&)~4|5emqt)i5JpGLdE+iz%b-B12)CP0)m- zq7f}K_hfmlp_5rKItqxcD}sA~T;wX*6U_mF*P0M`rf-MDrs3=PLogmC!?x_@JgY^C zx*I7Ytkg2sHS0#zZ59k0R8>y&RdceuOYXu<8rw%+!9l#zOGHA@cre~7chQt*9lnox zp7}!W#upv7z-`EQf*i2XUjj9xFg_L(X<+xpgVVN=>$hSfS=;GAJ~}e3mx0fVmZfF| z%bzHN=2}}_W9R*c?SF`8h_Z&!)>3<}e!5bgLTH+&;iC81)go#N*-q-P*4zKI_Ed; z$Kt4Kvp7-C2CX+JP}O#|QK16smN83yWn3e&JDu(P7jXuk)NrHc+WxWTA_&AC}E=DobCVl@8=u+=)@0<+LwqmuvCh zKowL>A}YL-{llX^`^bkgeou|>*u1jWr$huJe_7JF#-w>Y3f}OlrxwT_$M86I;a6(~ z#OGDxE>B*K6TMJ47PgmG5|NN_pH<*n@fA8)j0U4o1a$=CarflEIM?!P+aUOP83hix zu|_HCk9pC&ee(Ry@21n1{GW+Ca+vs?ml1m{h+ARSmb$R6weGE9LRy*{ z9GiaPHm{|At@1f>kAESw8KZb8n`+x1v%cj~CMvQh8n_-9Bt3Fg&w0T&VUa;XucTT; zX*c6rM6#a1fKrQXApsZVa^)G`0aIDlGz1xIX%pHq&<5`q{qMwUY}62-CWo?n~3Fg zS@R4~nf?&a^{5{QlZNVAd0X_m(QgU`WRV!gBGx{Sh@*ObDkZ#Ms9ekH_DUHwko*rh$B*JE^Du4 z5~RLF4Ft5#neM13Y7YY#nYcv^Y@-`0a%$wDZT;4 zuiv}7Iy7PuzWy=;d2n!ZDC^Vgj*!yh_yIPiz@VF&O_4ITvK(A+sHJ7W6Ey^4 z8gw~XOJ>=2v;-u+G?@J(s;vqYW}6o$mVWGjrNK&m^LXsgArmYmeDgN1O#W8!{Hq?y^b$q?zJD(B~LBqO0)bE;()>2yKFo;I(s zMUIIhdq+nC10sId!&)gvcOLuhy4zPWv})DHvSg*S@}ioU#Ib*zryWR#G>XRUQ9Jan z1SzXlP2_VP|MN{>#Py;o6ZsbnV5jPKUO)+`d__(ZIakvQT6YavMXcS}Fn-(G7ILbT zLllj}b^|-H+^l_MyfwQcv^SN<)s_A12HPg_Z9o@`HPM_%lsIoz!;-PO(cYTTdhibp zISy@2v>#FZ;Pt(>tDm>4i3+V{(XAt74Ty7x6{$0KPW2+3Kw~1dTZerQ0AEclh(Fne zQV#5Iexm$IfHax~I7Izu-C6+LQ3^6W)6g3oKx1GcmM?+Hvic)1!$4eRu&0Z|zYy8q ztRy)FP`e#zUD;SFmEeP(0j_~muiaZnfHayVB1pc&U}V8nv#8IlqR$265j?yUiAN?z7BW;>7j1M=bRN+lkTW76;j~t_#$g}UtW@h%7R24IfJsrdRVWd7AtViEeZ3NXS9;qBN;pWE54wnCCs)5MUk{S%lmMI zYAQrFyK2X$85^L(sCcU5r)BQkN!pM+kx6y@-+WwUH8OprRD0}#Jb6)gb+zp#2drjY z=gw@q5k8{c*;&S6fq>~s4mIDhIUqi4hAXsTt^2sn4Yhjyh|*JM4L4cH6yl0FauO6N z&mY%gqJYq7iI52G;CVKv=G%^|pGE2{9wugie)m7I{g2dDD*u*@8%aG`{~CC#HPV9c|kBkN=$&6gp} znKOT@sVmj{cjI?;p>*s$h-26maL_gp97C$G{8KxtaQ)A(CAj3YPmN5D$QJV<3wW_P zr)4O}J-Aq2E6EN6Byp!rb(C~LP6HhJ^Nz2u?z3&UHQUbTe3HUE|b9rdx2@+HY=JlZ_#+1CS+AZkH-s(k*P3Qd7)y9%+g?zQ~mS-l^7h9 z14D{9zDHKCdngw%U*J9BD;c^iKFMai5}pd_hb?4llA!=kX2qJpbV-^S=1~e<&Tro6 zSxP&={y~JS*o|C4l6%ESgF`bysDHW6|AP$~t}$g1%PuCu&{~Ew?>tuVTl z=4!7saq#2iVf1B35OSIwuejpk@&bFKQ&i1;*7q&8AUALP=1r}*%f*tB2X|Ro(0GZ1 z@$;yug_tLnIEmyls5>qlZgR-wzwiKuRWcEw#xF%_unzB`1h~e|x!|`~8y3%%lelwZ zDd7booCI4VXn4{Cc=Ri~+Y{NXCh*N}RBY3ghjTN+@`$s40KF`Vsuf0GGWqXw4>@(){@_Ab7mq0L@mDe3ar*n06XG~x?{rd#;WH%8jK- zJ7nE9n*tP3lCo5@0grlWXmP9}ixUVrSTwN5Lt72pTn(+7r|?J!0d$1e6DLHlf_D989 zti>9&n7Oc*&LvgxE>7b>vc5Ui_;wePd@ zuEGT53s~~kgFPXLrQ;<6B@OJ8=3arunxVAtO>7POz?7IDCoY?D=(E3DqpF1(5t|Gx zy;?1f0TjwnBq!FMjc-C#*n&BLS7uAI)xW!(5@t;RU*@S^+D3WM5A07xQ4D59DY@DD z62P%hl5h3&b4oUmX6t0x7@vX0Tl_0?c4iJ=3)%BB+f_3#ykWchC-+W}-_Lt=%!jNMOg z!=r4D(R1oOa`ll@ZXHn9Xtv9;wepr`u-Q0`=3UYb#=^MFhLqfB4McYTL-$&x1vWn? 
zi=_mOTyu)lxZvk{<_jTp(gzSk!IGit$0&YJAjUerPa5$KZ!{gk|F8{)Aa`o9r(waTz1@~E1)~7*W&@G)<%8cB?UMp#o zy#RDXK}|DjQB#FIcETdm1`VW$d*J`kMu~;Qa@i*Y{@Bk^2=!#pT}cYb953FbF`jV@Y7}+5P%B^9!|L{( z^49vkcm5>bzg65maq_D~jDW3Fh#&Yg2(juL zns*7R%GfUR?xfua^=A6kib+88KK7`*=3;CbkZ|NRZ_4MSzB*jmF0UNPSrZIHk&He6 z`vz#M7$wua=F%sYHyb^G`Z+X@;?lD8DKDU#fF@TTjW(OfLK~pVVR+HY3G&8IQpuY* zP$R^d;DzRDDg!YgN`DqXVC5Ym&{TU43&zEgFTog9cv)sXxmIjbCmv-SvO5?;K|?55!-y0+ebq&mWpzHHP%9b^0 z9mMyubix87nmdkqz1%)f9o75-ZPxaIXDR;>UFQplZ9YrIS6?)K&&`B4ezu9A$-HEp z7HfO*Ai~!%FYb0tQc&51m7a@$9}AxfKn~TlQ@zGRzD&`G+ev~SQ+aQKzJ#Z>!N}lz z{*t8G$xZg5QjD*ETx-A+W5sj)c~jHUFaaxP9_MjVP5)#VrfD zUWy5nXFb2i5|`JItJJ$nHty@wmGW{kRx?`~sS7Gas$r;Mf5uV2npkxj(q&SYocy>~ zzPoO&8xD%C6pLwC@UPZl?4$=8rjs-tx#)L0ZBKgP);!ZA;J+@SLX9jKRN<~E&lO!i z$&os3D?^w)>caRvl4-DnLz`BJG{6Hsl{$AkgO`w@xEzNwCphhQvSWANy!o}!gz6G{ zeagf@DT@b7b02#Fs-q4W9I7pnQI1LwpRisupWDXSF7#gGeuuGD?ln%4!LcdTL}6kR zZuf#bnwO5SZFjL)ZQBQL!c+Q`))^wuFhG4VMBB;3s0Jg;@+JVs-D3)D>oc@nxZj+V z{scG7oolw+s$wu$US}RXs<-Lkn4zHzVlCKAl6bfZny&K@K?sIusv4Qw?a z;uf*sPJVX9ia>k2v<<~)G*fK7mR2y0G?fRet{ehPX7m{}5j+GR@C-_~)~m(T=J!}p zQN!!oEa7T>@TVr(0}4!C#v_C3*gXFnVwS8xmTbK&-{1vRe|KxOb%60H94(hF2i{Py z$74Lv|ED4EwO-~fh6N{MBzA^1|G>b-eoO;G0fZ3c+7svfFlC2D=Pk}ri~)7Uk4=7P z#P>UxpO?}IqIjl{KZtV5lA>R-2vUt7(avh+So)BTeW)pj^z{UnGMdd;nwj55Auu5J z6%JCDS(zHlY8;yrS)Y{F7l@(I1ZM@X^fBiF>ZVJNl> z$<{-y4>L#>dir8V8ex2q`QlTKZt>u&PKN>mj@yeNk~*}O=xIWro!hns^lN_wsp>rx z=*GY;YwD#YoVn5(MUj+!)nqTZ-90Zmqeh04`r;f$DdW&J>@cu1{a;`$-h6 z{=Hag{I&TN7a49_0D|(_{dmpHJ4rviznps0tcds~ZgGZbCB066rJ5%$Ta)o47`UP4 zQC}5bcj_rVFBxyKP7}8q8<*EO#6$^V&G%T99IU9dyU~-(x^RiEP?lv%35&>9~&(I`d-xij%F-T4(?(rL#3aRrR%KQO~ zu?UHX>Y*Xv->fCGWTk&(NpVXn33n2a;bG1Pea99Eu3z>S1-R2({ z^5UKb5Sd{~j~lft0)aM zVuD(Ynr9qEmJ;w@syTA6Mdy`mNPD?VE%Je-PmCms?aD6anWpNg%@CiHI!5Q(lz;n& z)sLH+R5|kL0W{)DRB)gIL&mzus`HO?)PCLe zV>2Pj+RomzP|>Z#Z!=yUAO#rk2w65DAYd zR(2&*g_%^P=;&oby<7m|JthmlFj?pv$Dr%E3Cp8w+KX#=4-JSpLDGSY`iSO7S(y^E z)fcrz=0GWkeTgITB_VI<4*&YCq;4C7eXyKMfJ3jxa%I?3^~l;p5gl36T%FxH8$vT~ z^@; z0q65`Q)WtI@qNI$BBHa`!t!1#?rJ-$`S&%iL^ zJBmhmXe^DklA~B%b$_@Mb@U#e$G%qxH}99A+17A@OpBYEyRG_~&K);g{I_|E<=r79 zjXL9+<~A?XQH&~L3UbM&!sTh6C8P0Z6ld}a?sk17a73FPvLYENiijAHF~n5R1f0Ay zKR>Zl64MvR?)Xk*zQi5Q<6IHl>VQ@G7d9SHG|>Ls?({?njK%d}>65n~23ki$Y_?xHY9M;8|`8}r6H0X;SrT}plT8Mn5@!Qqn#2AvL zfg(d2=B!4Qp+&}MJ(Cw^@xqG+c5E!oj>wb-b2fhVGR;RXE*3+*X0dirwSj1VQb$@-OUbK*6c{I!} zGOJk3+x8I)$}`QKX*v3pkEE7$VyTdzYmsvmrBnA{^b1d4B_9syasc!G3jj!K zk5SCkQO+uZ30CY}bpx)>R#MsQx zHB~m3A66AA*MBwj|NUz&7t^r#Z*>d{?n&cO>GN@G5*?&@Yi5^cy426LNG+POOCb5| zstv3piWPz?-z&XW6_a_x^=0p&X(szee)FMZ5JH9k`3VYIJ9Q$#2WtEd<5TA4^V82cx7zEg01=L_*y^JRR8(z7+d1>E^z_b za(K@&x&P4o5~lwYOT?}S z{AE>Y!PQtz08y4TuvJ*n<)ld-_fF%c=JtsdJXS26p4Bpuro(Y_SyJr06-5)^^Nf0o z1psfjPNJ3uWdqP5f%2DL^f7C+iPJS;D_U#WQ3eshe0d%a7Zaor=T`%Eawp1t21sKT z6KtqsesDp-lpRX2MP<<#Z5577eUKrorYN2U*q`M^CG}6^?aPq1NVeLJ>5yCX8;)s) zxwgSK>zc1^hSfQ3oN2aglcXIfRod4a=M}=s$`6`=SC_CF@OhCMD!nm7NER|-?pUZf ze~5aDpOcn_EOnEFZaY*j8FhT>okP9mF&Hz|J41U%G2cX?U;)Y{E$0B4Cs{K0Zr<%e zt5YHObd1cQZvjjb71Ar64bU=)=WFhY&*^JCzk4ylZWdiJ>T%i=XnrNHvL^E8ElwcD z_-#>$g)cisN2M zL^y~LZX6-)h01B-4(wdTIKop+S;Q3lDMhLcXbT;`v*bxwL;LYvZF1Dlo~3d`MuqRg zLe_QO{sTuO8i+bdKY!{n9#CNN{)R(r8OD;A5f8+*kcfsawJv-tic!h^d~@cE`r{ax zf)0!3E&Hoq@_djQzaX1l7z;oj z2N{u;Wy_*~7&>$^8AXL8=|KGk|3tZ3!Pe=cpug2%Pq;Iyo#-X< zMRKz`4IxIq+6x*-wJ|I-aMWo+2abIThVXVRp-&)Xfh6e#rJ1wBHL{n5TceD!NwvhB zB>kmwKwaG0&$p;h1HHX<6vz!Bb6JT`+=cQ!>sULJ?Fq-mQX8Mo3gC4)*ENi2TbAhQ$u$*qTT~|O z&Vwe0&g~39X{5w*F;Vunarvz*SynU1tQnS^FOA7h{;B72w!_b2AuG(bWmm{0BUYa* z&--jF`YHgR{M$C6kQuW(@`4)U7{E3Tg}qFMxY@kANYFGIQ<0*4Dzt-bapruHg=AU` 
z9)nRWj$sgHEQwDy(0&$>x=FG*FizIg3NybO{D1zb7b3sN?aQKQ&Kqx-|6(MdMM`e^ z34;V;wGw7ts5KaENtFSymlx)X;4%2Gfvc5rm`_xeoCg?xzG#fRR+rgXX$?IsE zkgL!7>0b#RKP?b)|H$#@!{31NohA9y>KYLuKWU_B)Q=8%{yeq+==e<5C!qvUmhSG6 z4|!>ODVBUx^Q@+yko&5W(-3#zFytzu#;bPlc~Sb9e5Q$yg_lJH?=zFB=-)$?s2q~+X{mV zQHGwe-dSXn`Bby=8pJ+EscR)!zqwdmb+9RSe`tt`V~ABZJq^Z)S8n(n#&6F*^%k%3 z{nXw!6Xe$=h~ntxWszyj4McS&V6k($!S)ZY`w^r4p(i$y5A3+(Eqr1#rHD1K`yULqu)+QcJg<{PUD3M z#e8d2O(Md~0#e!pS{4)5%aX_a5Gzs1!WP-6VeQMOcHWon^z#DJ2%Sv%7i7_?)z^%L z)(A$|K$_c!ETJV0G~c{gz@m<+d6!IzB1welaxN0tPB~7-w5pGpo+O(E#_vGN)s0BH zp41x6UA_>fD6@dmByUo0ERHB7nL16y%nxBg6p>St9sq@Fv z5RaNFqFfo)5Ax{W3pWo9GWurby;K1CseXQvm#%UxSLTPaJ$2f)Hm?1;3c|MO^3Ho# z)CL$r1F=yox(zSc;8D{!2vNu|wt8(7Lq*@|N?AH16H0K{yk6=;UJtwgOUy**FgI6* zxy2zjyJIIe;~-ETjYUTM#KU_m!0s&`HV!7aGDu|Ic!6YxDK$f$)f94_3z_(LtXCU4 zzVve!KOW^^+_QEUG;C#R6Be5KpIL~(fkAbRlY~>%jDmJ zspZGj(9*CN_=I(^WrZUJL*gG+(QQ$m5^{0IArvi&tL-Wa`n$DQ8@Jh6O;J0XYcUY_ z2Oi6FttHW@m11nFJXEXu0p4R-Y;a#>hBdmiajw*OGK-0gv@Xg=4%N22j^gwrZ07NY zS~GtXF$T(^Fo9W?gR;b$i0!6IZ^rwP)TA?DMhqxS^n9xmmOfzv0>efhx5!5KN=~)9 zIOeG?mWUu&91`AgqAM+dfr6G%C2);u}p8`cH^Xv z!DXh|K_<3hAdO{A@~8+6nU@72>=i@cH^G~515G=0K#?48C;xw9lk88d= zI|^6hg^MRr|B@{zL%N#GA}^G#lsQ0|e=LbB^DQ=3#5FxOI5&D7;sJuh&p^Xey~+CR zX%H5^;M%Ik;}|Hsin+<=BLUrDybJ=j z6|2I_FVsUbBu77(7vtP!5HY9`u{#@Jcy(~7|45mX$iy+k;m1tloA2>x#XKNPsz+nC zH9&7XeF1+8P?@-2yg*xB^L8>ujC(FxRyVae*vdytVjoOEWP6ihp4wb2l`A%k-k5W> z@$zfoI-`M%6C7R6I0Jx(>SE)5Oa5nF2AZ*WaQ#O#UWLS|rxD%bTfiUH*y()1+OdF3 z5W7E>nR-5E^@N&9_oHtE45MOwvIS~}If(~X8G}6{k7SyW8;J=XKvgM&T>U~qh|5QX zTxk_cg{oTS=eA;bRh;LsWr-Mrs5AHx2jY7164+7)*Ix^=_@s{Qz{Q&WX95hS3(t<`B3ZCE(6y@DA~tDT)@;r4^scI1>}!ME{j0SdGfn z*wqGby7*BpKSjXtudKu6u6B`pRD2a;O2g?8Bopd9_rls=K zg2t;BmZkVjs-YNrGk!bMKGC!CG1#h|b1A{o@b5N+scCi%to{;Yhg{dweE{F?A zLph4u!bg&mD)iSG-%wCdV-P4i^ze$SX4^Jy+1Tnka7bPtVmZdVECO}JRojM$grLPS`603iP{0f`k?TBs*#9nnpO=dMyA{Yx_ zH_3l7Hm+z$d0&y6A^JDk+Om(`yJ7<22F&ZBE(7dO7^YY5bDWb_=|AExnv`v+Z*3VI zn}tk}Jq`^B-<3UMX;7pbixuYk=EOy}ET>xj$~?qHH8Bhcb`f)1IB12QBnpdlCKfBT zVhqemlF9-33SGCo_47RQKl>bOhyDoyKQaG*V)}BdEz(7F+rmbYug6t?XkKN&xp<++*k$j5+F( zffNz~N~bR201BZAIPMsHtD!b5r+Wq;1{Vw4{wIN2=I3M>-~2HciZX&CNFwwoMti$S z9@h1MgrJTNfquK1N5BYwl9A%5+T>=OYl$H@y%}4oVw1f!Iz?Klk$*3+`3g(3k?C@P z5e@7@9K-Ea^kuXEF$XNbq1iWY&8$|9z_R`Jw7j%1gy!(9lm^{+ESg91QDypAvbJy@e_hvWycxH@H5>aC&;N}0K{Jyx}T%~q1?3a!*hM0P`qv@ zbzvt9Y08;Z(GFVOt~dt5E{J_p92BeM^nvOPFN z1{I$hnDa%}gKm;d+~jJBu;UnWSxkGjJJMBR*NiU+d#Bq8@|uve!VzvSyMSxXf_#QA zOYpCBK5qsE1t_IN6}VBL1-7cx^9MtyJ=7Gl39g`cZv+{o0c<_CvIUA4ZQj%kr=!!!X=w!sWvh;N?Vf zQ)gIGzj}M3E1>l#vy{z?D;R4}wNTBKPgK$J){M{a^DpZ2K?9-8CrZu}i=&1>GhdJp z?g63_l-lXqb13Elk}`?o;C>tadcn|$wbP!qKMj)o1(5#AqRg}cot%;BZt@LcB{70B z%z+n{1SJhRzAk^a&{-7mQ7bJ@caWwe>TnG`kwH6FK5EgKC$Hxntah~IPwfFjtem`p zaZ3#*yalIs)$z{QJBx3w5%2oO*$t6@meuW8r{i9%mNQz6AyjyQR`L9`kl6uiG*L*g zR8|6j#%R{8%|nCY)r@C($waEkX)sqKe=2@KuW@+Pv@7P(i{9(pGO}>G<2jd@f`ct# z7s|)mxV8q5fdpPAnt%lTZx#bgGxypnca&XP!)Xf&MqexcR1-$bZue!0o_+r@iIc+> zF-0K;N8w`z$G=9K?$ro18=j>?S@~1MK5prNY@k4bi$qJ1xh+3R5qaDUJkFZ>V5zJu zmReTK+h_2t`R283spO@uNs3ZE`Eld6nz+g9SWI6fB%`^^2tsxv{PGiOU6UoEP)so9 zPy^EDgZVEMU2$pY5qvODEWDkBRhmu;Fs5M&D@;cmbJ)Y)iJPr)ZF@cjLR(s(_-Vih z+H09*PiEVg906vTnBS`$AciU}V5sLNH59R`# zSw#j~dB`Wu?5*n~u=dA{K3V;@>ka5jU- zV{1MNIH2v5@}w;de##Jkj8LCA>kM~7AT%I+_Psd6yxz3L0!k1ft!3X4C|tYQ;~uAlpdbG=pia07Q|sqwQOLY z*oY2TB}2#RqtoU%42$DC(AX*)JFv7v^!kXX(F^UE6>-ohGHRXLDk;)wwb;a+x?SU1 z0}>h+=NwLV22kC7IU|Mm$#l~QR|uGL7+B<-ffePCW5-#RWvW0X6JU~p zq0hvh8lV`*u~-Ri zw4JkhD=fAU>$eX;e#sOHWOV3E7QD$X{vFfkOSQowKosEeosJuDR4 z5cv0MIA3aG-4*O{e3`AE(4m%X3Ehy=XS0tB7pKTc(hNd3=|E17mJ)57@T3u?-kL?o 
[git binary patch payload omitted: base85-encoded data, not human-readable]