diff --git a/docs/api_client/client.rst b/docs/api_client/client.rst new file mode 100644 index 0000000..d219e2d --- /dev/null +++ b/docs/api_client/client.rst @@ -0,0 +1,9 @@ +Platform API Client +========================== + +.. currentmodule:: enlyze.api_client.client + +.. autoclass:: _PaginatedResponse + +.. autoclass:: PlatformApiClient() + :members: diff --git a/docs/api_clients/timeseries/index.rst b/docs/api_client/index.rst similarity index 100% rename from docs/api_clients/timeseries/index.rst rename to docs/api_client/index.rst diff --git a/docs/api_clients/production_runs/models.rst b/docs/api_client/models.rst similarity index 52% rename from docs/api_clients/production_runs/models.rst rename to docs/api_client/models.rst index 03581d4..1a3048d 100644 --- a/docs/api_clients/production_runs/models.rst +++ b/docs/api_client/models.rst @@ -1,22 +1,47 @@ Models ====== -.. currentmodule:: enlyze.api_clients.production_runs.models +.. currentmodule:: enlyze.api_client.models -.. autoclass:: ProductionRunsApiModel() +.. autoclass:: PlatformApiModel() -.. autoclass:: ProductionRun() +.. autoclass:: Site() + :members: + :undoc-members: + :exclude-members: model_config, model_fields + :show-inheritance: + +.. autoclass:: MachineBase() :members: :undoc-members: :exclude-members: model_config, model_fields :show-inheritance: + .. autoclass:: Machine() :members: :undoc-members: :exclude-members: model_config, model_fields :show-inheritance: +.. autoclass:: Variable() + :members: + :undoc-members: + :exclude-members: model_config, model_fields + :show-inheritance: + +.. autoclass:: TimeseriesData() + :members: + :undoc-members: + :exclude-members: model_config, model_fields + :show-inheritance: + +.. autoclass:: ProductionRun() + :members: + :undoc-members: + :exclude-members: model_config, model_fields + :show-inheritance: + .. autoclass:: Quantity() :members: :undoc-members: diff --git a/docs/api_clients/base.rst b/docs/api_clients/base.rst deleted file mode 100644 index 1f7ef06..0000000 --- a/docs/api_clients/base.rst +++ /dev/null @@ -1,17 +0,0 @@ -Base Client -=========== - -.. currentmodule:: enlyze.api_clients.base - -.. autoclass:: M - -.. autoclass:: R - -.. autoclass:: ApiBaseModel - -.. autoclass:: PaginatedResponseBaseModel - -.. autoclass:: ApiBaseClient - :members: - :private-members: - :undoc-members: diff --git a/docs/api_clients/index.rst b/docs/api_clients/index.rst deleted file mode 100644 index 91ec8e1..0000000 --- a/docs/api_clients/index.rst +++ /dev/null @@ -1,9 +0,0 @@ -API Clients -=========== - -.. toctree:: - :maxdepth: 1 - - base - timeseries/index - production_runs/index diff --git a/docs/api_clients/production_runs/client.rst b/docs/api_clients/production_runs/client.rst deleted file mode 100644 index 2c5f92f..0000000 --- a/docs/api_clients/production_runs/client.rst +++ /dev/null @@ -1,9 +0,0 @@ -Production Runs API Client -========================== - -.. currentmodule:: enlyze.api_clients.production_runs.client - -.. autoclass:: _PaginatedResponse - -.. autoclass:: ProductionRunsApiClient() - :members: diff --git a/docs/api_clients/production_runs/index.rst b/docs/api_clients/production_runs/index.rst deleted file mode 100644 index 34a3568..0000000 --- a/docs/api_clients/production_runs/index.rst +++ /dev/null @@ -1,8 +0,0 @@ -Production Runs API -=================== - -.. 
toctree:: - :maxdepth: 1 - - client - models diff --git a/docs/api_clients/timeseries/client.rst b/docs/api_clients/timeseries/client.rst deleted file mode 100644 index fb84a40..0000000 --- a/docs/api_clients/timeseries/client.rst +++ /dev/null @@ -1,11 +0,0 @@ -Timeseries API Client -===================== - -.. currentmodule:: enlyze.api_clients.timeseries.client - -.. autoclass:: _PaginatedResponse() - :members: - :exclude-members: model_config, model_fields - -.. autoclass:: TimeseriesApiClient() - :members: get, get_paginated diff --git a/docs/api_clients/timeseries/models.rst b/docs/api_clients/timeseries/models.rst deleted file mode 100644 index dd03852..0000000 --- a/docs/api_clients/timeseries/models.rst +++ /dev/null @@ -1,30 +0,0 @@ -Models -====== - -.. currentmodule:: enlyze.api_clients.timeseries.models - -.. autoclass:: TimeseriesApiModel() - -.. autoclass:: Site() - :members: - :undoc-members: - :exclude-members: model_config, model_fields - :show-inheritance: - -.. autoclass:: Machine() - :members: - :undoc-members: - :exclude-members: model_config, model_fields - :show-inheritance: - -.. autoclass:: Variable() - :members: - :undoc-members: - :exclude-members: model_config, model_fields - :show-inheritance: - -.. autoclass:: TimeseriesData() - :members: - :undoc-members: - :exclude-members: model_config, model_fields - :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index 5d9247a..e7e605a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -20,4 +20,4 @@ User's Guide models errors constants - api_clients/index + api_client/index diff --git a/src/enlyze/api_clients/__init__.py b/src/enlyze/api_client/__init__.py similarity index 100% rename from src/enlyze/api_clients/__init__.py rename to src/enlyze/api_client/__init__.py diff --git a/src/enlyze/api_client/client.py b/src/enlyze/api_client/client.py new file mode 100644 index 0000000..0fb1ac5 --- /dev/null +++ b/src/enlyze/api_client/client.py @@ -0,0 +1,157 @@ +import json +from functools import cache +from http import HTTPStatus +from typing import Any, Iterator, Type, TypeVar + +import httpx +from pydantic import BaseModel, ValidationError + +from enlyze._version import VERSION +from enlyze.auth import TokenAuth +from enlyze.constants import HTTPX_TIMEOUT, PLATFORM_API_SUB_PATH, USER_AGENT +from enlyze.errors import EnlyzeError, InvalidTokenError + +from .models import PlatformApiModel + +T = TypeVar("T", bound=PlatformApiModel) + +USER_AGENT_NAME_VERSION_SEPARATOR = "/" + + +@cache +def _construct_user_agent( + *, user_agent: str = USER_AGENT, version: str = VERSION +) -> str: + return f"{user_agent}{USER_AGENT_NAME_VERSION_SEPARATOR}{version}" + + +class _Metadata(BaseModel): + next_cursor: str | None + + +class _PaginatedResponse(BaseModel): + metadata: _Metadata + data: list[dict[str, Any]] | dict[str, Any] + + +class PlatformApiClient: + """Client class encapsulating all interaction with the ENLYZE platform API + + :param token: API token for the ENLYZE platform API + :param base_url: Base URL of the ENLYZE platform API + :param timeout: Global timeout for all HTTP requests sent to the ENLYZE platform API + + """ + + def __init__( + self, + *, + token: str, + base_url: str | httpx.URL, + timeout: float = HTTPX_TIMEOUT, + ): + self._client = httpx.Client( + auth=TokenAuth(token), + base_url=httpx.URL(base_url).join(PLATFORM_API_SUB_PATH), + timeout=timeout, + headers={"user-agent": _construct_user_agent()}, + ) + + @cache + def _full_url(self, api_path: str) -> str: + """Construct full URL from 
relative URL""" + return str(self._client.build_request("", api_path).url) + + def get(self, api_path: str, **kwargs: Any) -> Any: + """Wraps :meth:`httpx.Client.get` with defensive error handling + + :param api_path: Relative URL path inside the API name space (or a full URL) + + :raises: :exc:`~enlyze.errors.EnlyzeError` on request failure + + :raises: :exc:`~enlyze.errors.EnlyzeError` on non-2xx status code + + :raises: :exc:`~enlyze.errors.EnlyzeError` on non-JSON payload + + :returns: JSON payload of the response as Python object + + """ + try: + response = self._client.get(api_path, **kwargs) + except Exception as e: + print(e) + raise EnlyzeError( + "Couldn't read from the ENLYZE platform API " + f"(GET {self._full_url(api_path)})", + ) from e + + try: + response.raise_for_status() + except httpx.HTTPStatusError as e: + if e.response.status_code in ( + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + ): + raise InvalidTokenError + else: + raise EnlyzeError( + f"ENLYZE platform API returned error {response.status_code}" + f" (GET {self._full_url(api_path)})" + ) from e + + try: + return response.json() + except json.JSONDecodeError as e: + raise EnlyzeError( + "ENLYZE platform API didn't return a valid JSON object " + f"(GET {self._full_url(api_path)})", + ) from e + + def get_paginated( + self, api_path: str, model: Type[T], **kwargs: Any + ) -> Iterator[T]: + """Retrieve objects from paginated ENLYZE Platform API endpoint via HTTP GET + + :param api_path: Relative URL path inside the ENLYZE Platform API + :param model: Class derived from + :class:`~enlyze.api_client.models.PlatformApiModel` + :raises: :exc:`~enlyze.errors.EnlyzeError` on invalid pagination schema + :raises: :exc:`~enlyze.errors.EnlyzeError` on invalid data schema + :raises: see :py:meth:`get` for more errors raised by this method + :returns: Instances of ``model`` retrieved from the ``api_path`` endpoint + """ + + params = kwargs.pop("params", {}) + + while True: + response_body = self.get(api_path, params=params, **kwargs) + + try: + paginated_response = _PaginatedResponse.model_validate(response_body) + except ValidationError as e: + raise EnlyzeError( + f"Paginated response expected (GET {self._full_url(api_path)})" + ) from e + + page_data = paginated_response.data + if not page_data: + break + + # if `data` is a list we assume there are multiple objects inside. 
+ # if `data` is a dict then we treat it as only one object + page_data = page_data if isinstance(page_data, list) else [page_data] + + for elem in page_data: + try: + yield model.model_validate(elem) + except ValidationError as e: + raise EnlyzeError( + f"ENLYZE platform API returned an unparsable {model.__name__} " + f"object (GET {self._full_url(api_path)})" + ) from e + + next_cursor = paginated_response.metadata.next_cursor + if next_cursor is None: + break + + params = {**params, "cursor": next_cursor} diff --git a/src/enlyze/api_client/models.py b/src/enlyze/api_client/models.py new file mode 100644 index 0000000..d15d3bf --- /dev/null +++ b/src/enlyze/api_client/models.py @@ -0,0 +1,213 @@ +from datetime import date, datetime, timedelta +from typing import Any, Optional, Sequence +from uuid import UUID + +from pydantic import BaseModel + +import enlyze.models as user_models + + +class PlatformApiModel(BaseModel): + """Base class for Enlyze Platform API object models using pydantic + + All objects received from the Enlyze Platform API are passed into models + that derive from this class and thus use pydantic for schema definition + and validation. + + """ + + +class Site(PlatformApiModel): + uuid: UUID + name: str + address: str + + def to_user_model(self) -> user_models.Site: + """Convert into a :ref:`user model `""" + + return user_models.Site( + uuid=self.uuid, + address=self.address, + display_name=self.name, + ) + + +class MachineBase(PlatformApiModel): + """The machine related information returned for a + :class:`.ProductionRun`""" + + name: str + uuid: UUID + + +class Machine(MachineBase): + genesis_date: date + site: UUID + + def to_user_model(self, site: user_models.Site) -> user_models.Machine: + """Convert into a :ref:`user model `""" + + return user_models.Machine( + uuid=self.uuid, + display_name=self.name, + genesis_date=self.genesis_date, + site=site, + ) + + +class Variable(PlatformApiModel): + uuid: UUID + display_name: Optional[str] + unit: Optional[str] + data_type: user_models.VariableDataType + + def to_user_model(self, machine: user_models.Machine) -> user_models.Variable: + """Convert into a :ref:`user model `.""" + + return user_models.Variable( + uuid=self.uuid, + display_name=self.display_name, + unit=self.unit, + data_type=self.data_type, + machine=machine, + ) + + +class TimeseriesData(PlatformApiModel): + columns: list[str] + records: list[Any] + + def extend(self, other: "TimeseriesData") -> None: + """Add records from ``other`` after the existing records.""" + self.records.extend(other.records) + + def merge(self, other: "TimeseriesData") -> "TimeseriesData": + """Merge records from ``other`` into the existing records.""" + slen, olen = len(self.records), len(other.records) + if olen < slen: + raise ValueError( + "Cannot merge. Attempted to merge" + f" an instance with {olen} records into an instance with {slen}" + " records. The instance to merge must have a number" + " of records greater than or equal to the number of records of" + " the instance you're trying to merge into." + ) + + self.columns.extend(other.columns[1:]) + + for s, o in zip(self.records, other.records[:slen]): + if s[0] != o[0]: + raise ValueError( + "Cannot merge. 
Attempted to merge records " + f"with mismatched timestamps {s[0]}, {o[0]}" + ) + + s.extend(o[1:]) + + return self + + def to_user_model( + self, + start: datetime, + end: datetime, + variables: Sequence[user_models.Variable], + ) -> user_models.TimeseriesData: + return user_models.TimeseriesData( + start=start, + end=end, + variables=variables, + _columns=self.columns, + _records=self.records, + ) + + +class OEEComponent(PlatformApiModel): + score: float + time_loss: int + + def to_user_model(self) -> user_models.OEEComponent: + """Convert into a :ref:`user model `""" + + return user_models.OEEComponent( + score=self.score, + time_loss=timedelta(seconds=self.time_loss), + ) + + +class Product(PlatformApiModel): + code: str + name: Optional[str] + + def to_user_model(self) -> user_models.Product: + """Convert into a :ref:`user model `""" + + return user_models.Product( + code=self.code, + name=self.name, + ) + + +class Quantity(PlatformApiModel): + unit: str | None + value: float + + def to_user_model(self) -> user_models.Quantity: + """Convert into a :ref:`user model `""" + + return user_models.Quantity( + unit=self.unit, + value=self.value, + ) + + +class ProductionRun(PlatformApiModel): + uuid: UUID + machine: MachineBase + average_throughput: Optional[float] + production_order: str + product: Product + start: datetime + end: Optional[datetime] + quantity_total: Optional[Quantity] + quantity_scrap: Optional[Quantity] + quantity_yield: Optional[Quantity] + availability: Optional[OEEComponent] + performance: Optional[OEEComponent] + quality: Optional[OEEComponent] + productivity: Optional[OEEComponent] + + def to_user_model( + self, machines_by_uuid: dict[UUID, user_models.Machine] + ) -> user_models.ProductionRun: + """Convert into a :ref:`user model `""" + + quantity_total = ( + self.quantity_total.to_user_model() if self.quantity_total else None + ) + quantity_scrap = ( + self.quantity_scrap.to_user_model() if self.quantity_scrap else None + ) + quantity_yield = ( + self.quantity_yield.to_user_model() if self.quantity_yield else None + ) + availability = self.availability.to_user_model() if self.availability else None + performance = self.performance.to_user_model() if self.performance else None + quality = self.quality.to_user_model() if self.quality else None + productivity = self.productivity.to_user_model() if self.productivity else None + + return user_models.ProductionRun( + uuid=self.uuid, + machine=machines_by_uuid[self.machine.uuid], + average_throughput=self.average_throughput, + production_order=self.production_order, + product=self.product.to_user_model(), + start=self.start, + end=self.end, + quantity_total=quantity_total, + quantity_scrap=quantity_scrap, + quantity_yield=quantity_yield, + availability=availability, + performance=performance, + quality=quality, + productivity=productivity, + ) diff --git a/src/enlyze/api_clients/base.py b/src/enlyze/api_clients/base.py deleted file mode 100644 index 1eec15c..0000000 --- a/src/enlyze/api_clients/base.py +++ /dev/null @@ -1,231 +0,0 @@ -import json -from abc import ABC, abstractmethod -from collections.abc import Iterator -from functools import cache -from http import HTTPStatus -from typing import Any, Generic, TypeVar - -import httpx -from pydantic import BaseModel, ValidationError - -from enlyze._version import VERSION -from enlyze.auth import TokenAuth -from enlyze.constants import HTTPX_TIMEOUT, USER_AGENT -from enlyze.errors import EnlyzeError, InvalidTokenError - -USER_AGENT_NAME_VERSION_SEPARATOR = "/" - - -@cache 
-def _construct_user_agent( - *, user_agent: str = USER_AGENT, version: str = VERSION -) -> str: - return f"{user_agent}{USER_AGENT_NAME_VERSION_SEPARATOR}{version}" - - -class ApiBaseModel(BaseModel): - """Base class for ENLYZE platform API object models using pydantic - - All objects received from ENLYZE platform APIs are passed into models that derive - from this class and thus use pydantic for schema definition and validation. - - """ - - -class PaginatedResponseBaseModel(BaseModel): - """Base class for paginated ENLYZE platform API responses using pydantic.""" - - data: Any - - -#: TypeVar("M", bound=ApiBaseModel): Type variable serving as a parameter -# for API response model classes. -M = TypeVar("M", bound=ApiBaseModel) - - -#: TypeVar("R", bound=PaginatedResponseBaseModel) Type variable serving as a parameter -# for paginated response models. -R = TypeVar("R", bound=PaginatedResponseBaseModel) - - -class ApiBaseClient(ABC, Generic[R]): - """Client base class encapsulating all interaction with all ENLYZE platform APIs. - - :param token: API token for the ENLYZE platform - :param base_url: Base URL of the ENLYZE platform - :param timeout: Global timeout for HTTP requests sent to the ENLYZE platform APIs - - """ - - PaginatedResponseModel: type[R] - - def __init__( - self, - *, - token: str, - base_url: str | httpx.URL, - timeout: float = HTTPX_TIMEOUT, - ): - self._client = httpx.Client( - auth=TokenAuth(token), - base_url=httpx.URL(base_url), - timeout=timeout, - headers={"user-agent": _construct_user_agent()}, - ) - - @cache - def _full_url(self, api_path: str) -> str: - """Construct full URL from relative URL""" - return str(self._client.build_request("", api_path).url) - - def get(self, api_path: str, **kwargs: Any) -> Any: - """Wraps :meth:`httpx.Client.get` with defensive error handling - - :param api_path: Relative URL path inside the API name space (or a full URL) - - :raises: :exc:`~enlyze.errors.EnlyzeError` on request failure - - :raises: :exc:`~enlyze.errors.EnlyzeError` on non-2xx status code - - :raises: :exc:`~enlyze.errors.EnlyzeError` on non-JSON payload - - :returns: JSON payload of the response as Python object - - """ - - try: - response = self._client.get(api_path, **kwargs) - except Exception as e: - raise EnlyzeError( - "Couldn't read from the ENLYZE platform API " - f"(GET {self._full_url(api_path)})", - ) from e - - try: - response.raise_for_status() - except httpx.HTTPStatusError as e: - if e.response.status_code in ( - HTTPStatus.UNAUTHORIZED, - HTTPStatus.FORBIDDEN, - ): - raise InvalidTokenError - else: - raise EnlyzeError( - f"ENLYZE platform API returned error {response.status_code}" - f" (GET {self._full_url(api_path)})" - ) from e - - try: - return response.json() - except json.JSONDecodeError as e: - raise EnlyzeError( - "ENLYZE platform API didn't return a valid JSON object " - f"(GET {self._full_url(api_path)})", - ) from e - - def _transform_paginated_response_data(self, data: Any) -> Any: - """Transform paginated response data. Returns ``data`` by default. - - :param data: Response data from a paginated response - - :returns: An iterable of transformed data - - """ - return data - - @abstractmethod - def _has_more(self, paginated_response: R) -> bool: - """Indicates there is more data to fetch from the server. - - :param paginated_response: A paginated response model deriving from - :class:`PaginatedResponseBaseModel`. 
- - """ - - @abstractmethod - def _next_page_call_args( - self, - *, - url: str, - params: dict[str, Any], - paginated_response: R, - **kwargs: Any, - ) -> tuple[str, dict[str, Any], dict[str, Any]]: - r"""Compute call arguments for the next page. - - :param url: The URL used to fetch the current page - :param params: URL query parameters of the current page - :param paginated_response: A paginated response model deriving from - :class:`~enlyze.api_clients.base.PaginatedResponseBaseModel` - :param \**kwargs: Keyword arguments passed into - :py:meth:`~enlyze.api_clients.base.ApiBaseClient.get_paginated` - - :returns: A tuple of comprised of the URL, query parameters and keyword - arguments to fetch the next page - - """ - - def get_paginated( - self, api_path: str, model: type[M], **kwargs: Any - ) -> Iterator[M]: - """Retrieve objects from a paginated ENLYZE platform API endpoint via HTTP GET. - - To add pagination capabilities to an API client deriving from this class, two - abstract methods need to be implemented, - :py:meth:`~enlyze.api_clients.base.ApiBaseClient._has_more` and - :py:meth:`~enlyze.api_clients.base.ApiBaseClient._next_page_call_args`. - Optionally, API clients may transform page data by overriding - :py:meth:`~enlyze.api_clients.base.ApiBaseClient._transform_paginated_response_data`, - which by default returns the unmodified page data. - - :param api_path: Relative URL path inside the API name space - :param model: API response model class deriving from - :class:`~enlyze.api_clients.base.ApiBaseModel` - - :raises: :exc:`~enlyze.errors.EnlyzeError` on invalid pagination schema - - :raises: :exc:`~enlyze.errors.EnlyzeError` on invalid data schema - - :raises: see :py:meth:`get` for more errors raised by this method - - :returns: Instances of ``model`` retrieved from the ``api_path`` endpoint - - """ - - url = api_path - params = kwargs.pop("params", {}) - - while True: - response_body = self.get(url, params=params, **kwargs) - try: - paginated_response = self.PaginatedResponseModel.model_validate( - response_body - ) - except ValidationError as e: - raise EnlyzeError( - f"Paginated response expected (GET {self._full_url(url)})" - ) from e - - page_data = paginated_response.data - if not page_data: - break - - page_data = self._transform_paginated_response_data(page_data) - - for elem in page_data: - try: - yield model.model_validate(elem) - except ValidationError as e: - raise EnlyzeError( - f"ENLYZE platform API returned an unparsable {model.__name__} " - f"object (GET {self._full_url(api_path)})" - ) from e - if not self._has_more(paginated_response): - break - - url, params, kwargs = self._next_page_call_args( - url=url, - params=params, - paginated_response=paginated_response, - **kwargs, - ) diff --git a/src/enlyze/api_clients/production_runs/client.py b/src/enlyze/api_clients/production_runs/client.py deleted file mode 100644 index 6610913..0000000 --- a/src/enlyze/api_clients/production_runs/client.py +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Any - -import httpx -from pydantic import BaseModel - -from enlyze.api_clients.base import ApiBaseClient, PaginatedResponseBaseModel -from enlyze.constants import PRODUCTION_RUNS_API_SUB_PATH - - -class _Metadata(BaseModel): - next_cursor: int | None - has_more: bool - - -class _PaginatedResponse(PaginatedResponseBaseModel): - metadata: _Metadata - data: list[dict[str, Any]] - - -class ProductionRunsApiClient(ApiBaseClient[_PaginatedResponse]): - """Client class encapsulating all interaction with the Production Runs 
API - - :param token: API token for the ENLYZE platform - :param base_url: Base URL of the ENLYZE platform - :param timeout: Global timeout for all HTTP requests sent to the Production Runs API - - """ - - PaginatedResponseModel = _PaginatedResponse - - def __init__( - self, - *, - token: str, - base_url: str | httpx.URL, - **kwargs: Any, - ): - super().__init__( - token=token, - base_url=httpx.URL(base_url).join(PRODUCTION_RUNS_API_SUB_PATH), - **kwargs, - ) - - def _has_more(self, paginated_response: _PaginatedResponse) -> bool: - return paginated_response.metadata.has_more - - def _next_page_call_args( - self, - url: str, - params: dict[str, Any], - paginated_response: _PaginatedResponse, - **kwargs: Any, - ) -> tuple[str, dict[str, Any], dict[str, Any]]: - next_params = {**params, "cursor": paginated_response.metadata.next_cursor} - return (url, next_params, kwargs) diff --git a/src/enlyze/api_clients/production_runs/models.py b/src/enlyze/api_clients/production_runs/models.py deleted file mode 100644 index d52fe06..0000000 --- a/src/enlyze/api_clients/production_runs/models.py +++ /dev/null @@ -1,119 +0,0 @@ -from abc import abstractmethod -from datetime import datetime, timedelta -from typing import Any, Optional -from uuid import UUID - -from pydantic import Field - -import enlyze.models as user_models -from enlyze.api_clients.base import ApiBaseModel - - -class ProductionRunsApiModel(ApiBaseModel): - """Base class for Production Runs API object models using pydantic - - All objects received from the Production Runs API are passed into models that derive - from this class and thus use pydantic for schema definition and validation. - - """ - - @abstractmethod - def to_user_model(self, *args: Any, **kwargs: Any) -> Any: - """Convert to a model that will be returned to the user.""" - - -class OEEComponent(ProductionRunsApiModel): - score: float - time_loss: int - - def to_user_model(self) -> user_models.OEEComponent: - """Convert into a :ref:`user model `""" - - return user_models.OEEComponent( - score=self.score, - time_loss=timedelta(seconds=self.time_loss), - ) - - -class Product(ProductionRunsApiModel): - code: str - name: Optional[str] - - def to_user_model(self) -> user_models.Product: - """Convert into a :ref:`user model `""" - - return user_models.Product( - code=self.code, - name=self.name, - ) - - -class Quantity(ProductionRunsApiModel): - unit: str | None - value: float - - def to_user_model(self) -> user_models.Quantity: - """Convert into a :ref:`user model `""" - - return user_models.Quantity( - unit=self.unit, - value=self.value, - ) - - -class Machine(ApiBaseModel): - name: str - uuid: UUID - - -class ProductionRun(ProductionRunsApiModel): - uuid: UUID - machine: Machine = Field(alias="appliance") - average_throughput: Optional[float] - production_order: str - product: Product - start: datetime - end: Optional[datetime] - quantity_total: Optional[Quantity] - quantity_scrap: Optional[Quantity] - quantity_yield: Optional[Quantity] - availability: Optional[OEEComponent] - performance: Optional[OEEComponent] - quality: Optional[OEEComponent] - productivity: Optional[OEEComponent] - - def to_user_model( - self, machines_by_uuid: dict[UUID, user_models.Machine] - ) -> user_models.ProductionRun: - """Convert into a :ref:`user model `""" - - quantity_total = ( - self.quantity_total.to_user_model() if self.quantity_total else None - ) - quantity_scrap = ( - self.quantity_scrap.to_user_model() if self.quantity_scrap else None - ) - quantity_yield = ( - 
self.quantity_yield.to_user_model() if self.quantity_yield else None - ) - availability = self.availability.to_user_model() if self.availability else None - performance = self.performance.to_user_model() if self.performance else None - quality = self.quality.to_user_model() if self.quality else None - productivity = self.productivity.to_user_model() if self.productivity else None - - return user_models.ProductionRun( - uuid=self.uuid, - machine=machines_by_uuid[self.machine.uuid], - average_throughput=self.average_throughput, - production_order=self.production_order, - product=self.product.to_user_model(), - start=self.start, - end=self.end, - quantity_total=quantity_total, - quantity_scrap=quantity_scrap, - quantity_yield=quantity_yield, - availability=availability, - performance=performance, - quality=quality, - productivity=productivity, - ) diff --git a/src/enlyze/api_clients/timeseries/__init__.py b/src/enlyze/api_clients/timeseries/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/enlyze/api_clients/timeseries/client.py b/src/enlyze/api_clients/timeseries/client.py deleted file mode 100644 index b3385fd..0000000 --- a/src/enlyze/api_clients/timeseries/client.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import Any, Tuple - -import httpx -from pydantic import AnyUrl - -from enlyze.api_clients.base import ApiBaseClient, PaginatedResponseBaseModel -from enlyze.constants import TIMESERIES_API_SUB_PATH - - -class _PaginatedResponse(PaginatedResponseBaseModel): - next: AnyUrl | None - data: list[Any] | dict[str, Any] - - -class TimeseriesApiClient(ApiBaseClient[_PaginatedResponse]): - """Client class encapsulating all interaction with the Timeseries API - - :param token: API token for the ENLYZE platform - :param base_url: Base URL of the ENLYZE platform - :param timeout: Global timeout for all HTTP requests sent to the Timeseries API - - """ - - PaginatedResponseModel = _PaginatedResponse - - def __init__( - self, - *, - token: str, - base_url: str | httpx.URL, - **kwargs: Any, - ): - super().__init__( - token=token, - base_url=httpx.URL(base_url).join(TIMESERIES_API_SUB_PATH), - **kwargs, - ) - - def _transform_paginated_response_data( - self, paginated_response_data: list[Any] | dict[str, Any] - ) -> list[dict[str, Any]]: - # The timeseries endpoint's response data field is a mapping. - # Because get_paginated assumes the ``data`` field to be a list, - # we wrap it into a list. 
- return ( - paginated_response_data - if isinstance(paginated_response_data, list) - else [paginated_response_data] - ) - - def _has_more(self, paginated_response: _PaginatedResponse) -> bool: - return paginated_response.next is not None - - def _next_page_call_args( - self, - *, - url: str, - params: dict[str, Any], - paginated_response: _PaginatedResponse, - **kwargs: Any, - ) -> Tuple[str, dict[str, Any], dict[str, Any]]: - next_url = str(paginated_response.next) - return (next_url, params, kwargs) diff --git a/src/enlyze/api_clients/timeseries/models.py b/src/enlyze/api_clients/timeseries/models.py deleted file mode 100644 index 632657f..0000000 --- a/src/enlyze/api_clients/timeseries/models.py +++ /dev/null @@ -1,115 +0,0 @@ -from datetime import date, datetime -from typing import Any, Optional, Sequence -from uuid import UUID - -import enlyze.models as user_models -from enlyze.api_clients.base import ApiBaseModel - - -class TimeseriesApiModel(ApiBaseModel): - """Base class for Timeseries API object models using pydantic - - All objects received from the Timeseries API are passed into models that derive from - this class and thus use pydantic for schema definition and validation. - - """ - - pass - - -class Site(TimeseriesApiModel): - id: int - name: str - address: str - - def to_user_model(self) -> user_models.Site: - """Convert into a :ref:`user model `""" - - return user_models.Site( - _id=self.id, - address=self.address, - display_name=self.name, - ) - - -class Machine(TimeseriesApiModel): - uuid: UUID - name: str - genesis_date: date - site: int - - def to_user_model(self, site: user_models.Site) -> user_models.Machine: - """Convert into a :ref:`user model `""" - - return user_models.Machine( - uuid=self.uuid, - display_name=self.name, - genesis_date=self.genesis_date, - site=site, - ) - - -class Variable(TimeseriesApiModel): - uuid: UUID - display_name: Optional[str] - unit: Optional[str] - data_type: user_models.VariableDataType - - def to_user_model(self, machine: user_models.Machine) -> user_models.Variable: - """Convert into a :ref:`user model `.""" - - return user_models.Variable( - uuid=self.uuid, - display_name=self.display_name, - unit=self.unit, - data_type=self.data_type, - machine=machine, - ) - - -class TimeseriesData(TimeseriesApiModel): - columns: list[str] - records: list[Any] - - def extend(self, other: "TimeseriesData") -> None: - """Add records from ``other`` after the existing records.""" - self.records.extend(other.records) - - def merge(self, other: "TimeseriesData") -> "TimeseriesData": - """Merge records from ``other`` into the existing records.""" - slen, olen = len(self.records), len(other.records) - if olen < slen: - raise ValueError( - "Cannot merge. Attempted to merge" - f" an instance with {olen} records into an instance with {slen}" - " records. The instance to merge must have a number" - " of records greater than or equal to the number of records of" - " the instance you're trying to merge into." - ) - - self.columns.extend(other.columns[1:]) - - for s, o in zip(self.records, other.records[:slen]): - if s[0] != o[0]: - raise ValueError( - "Cannot merge. 
Attempted to merge records " - f"with mismatched timestamps {s[0]}, {o[0]}" - ) - - s.extend(o[1:]) - - return self - - def to_user_model( - self, - start: datetime, - end: datetime, - variables: Sequence[user_models.Variable], - ) -> user_models.TimeseriesData: - return user_models.TimeseriesData( - start=start, - end=end, - variables=variables, - _columns=self.columns, - _records=self.records, - ) diff --git a/src/enlyze/auth.py b/src/enlyze/auth.py index c3eda89..4f09e28 100644 --- a/src/enlyze/auth.py +++ b/src/enlyze/auth.py @@ -19,7 +19,7 @@ def __init__(self, token: str): if not token: raise InvalidTokenError("Token must not be empty") - self._auth_header = f"Token {token}" + self._auth_header = f"Bearer {token}" def auth_flow(self, request: Request) -> Generator[Request, Response, None]: """Inject token into authorization header""" diff --git a/src/enlyze/client.py b/src/enlyze/client.py index a29f6bf..c9d7d9e 100644 --- a/src/enlyze/client.py +++ b/src/enlyze/client.py @@ -4,11 +4,9 @@ from typing import Any, Iterator, Mapping, Optional, Sequence, Tuple, Union from uuid import UUID -import enlyze.api_clients.timeseries.models as timeseries_api_models +import enlyze.api_client.models as platform_api_models import enlyze.models as user_models -from enlyze.api_clients.production_runs.client import ProductionRunsApiClient -from enlyze.api_clients.production_runs.models import ProductionRun -from enlyze.api_clients.timeseries.client import TimeseriesApiClient +from enlyze.api_client.client import PlatformApiClient from enlyze.constants import ( ENLYZE_BASE_URL, MAXIMUM_NUMBER_OF_VARIABLES_PER_TIMESERIES_REQUEST, @@ -28,8 +26,8 @@ def _get_timeseries_data_from_pages( - pages: Iterator[timeseries_api_models.TimeseriesData], -) -> Optional[timeseries_api_models.TimeseriesData]: + pages: Iterator[platform_api_models.TimeseriesData], +) -> Optional[platform_api_models.TimeseriesData]: try: timeseries_data = next(pages) except StopIteration: @@ -90,19 +88,14 @@ class EnlyzeClient: """ def __init__(self, token: str, *, _base_url: str | None = None) -> None: - self._timeseries_api_client = TimeseriesApiClient( - token=token, - base_url=_base_url or ENLYZE_BASE_URL, - ) - self._production_runs_api_client = ProductionRunsApiClient( - token=token, - base_url=_base_url or ENLYZE_BASE_URL, + self._platform_api_client = PlatformApiClient( + token=token, base_url=_base_url or ENLYZE_BASE_URL ) - def _get_sites(self) -> Iterator[timeseries_api_models.Site]: + def _get_sites(self) -> Iterator[platform_api_models.Site]: """Get all sites from the API""" - return self._timeseries_api_client.get_paginated( - "sites", timeseries_api_models.Site + return self._platform_api_client.get_paginated( + "sites", platform_api_models.Site ) @cache @@ -119,10 +112,10 @@ def get_sites(self) -> list[user_models.Site]: """ return [site.to_user_model() for site in self._get_sites()] - def _get_machines(self) -> Iterator[timeseries_api_models.Machine]: + def _get_machines(self) -> Iterator[platform_api_models.Machine]: """Get all machines from the API""" - return self._timeseries_api_client.get_paginated( - "appliances", timeseries_api_models.Machine + return self._platform_api_client.get_paginated( + "machines", platform_api_models.Machine ) @cache @@ -144,13 +137,13 @@ def get_machines( """ if site: - sites_by_id = {site._id: site} + sites_by_uuid = {site.uuid: site} else: - sites_by_id = {site._id: site for site in self.get_sites()} + sites_by_uuid = {site.uuid: site for site in self.get_sites()} machines = [] for 
machine_api in self._get_machines(): - site_ = sites_by_id.get(machine_api.site) + site_ = sites_by_uuid.get(machine_api.site) if not site_: continue @@ -160,12 +153,12 @@ def get_machines( def _get_variables( self, machine_uuid: UUID - ) -> Iterator[timeseries_api_models.Variable]: + ) -> Iterator[platform_api_models.Variable]: """Get variables for a machine from the API.""" - return self._timeseries_api_client.get_paginated( + return self._platform_api_client.get_paginated( "variables", - timeseries_api_models.Variable, - params={"appliance": str(machine_uuid)}, + platform_api_models.Variable, + params={"machine": str(machine_uuid)}, ) def get_variables( @@ -195,9 +188,9 @@ def _get_paginated_timeseries( end: datetime, variables: Sequence[str], resampling_interval: Optional[int], - ) -> Iterator[timeseries_api_models.TimeseriesData]: + ) -> Iterator[platform_api_models.TimeseriesData]: params: dict[str, Any] = { - "appliance": machine_uuid, + "machine": machine_uuid, "start_datetime": start.isoformat(), "end_datetime": end.isoformat(), "variables": ",".join(variables), @@ -206,8 +199,8 @@ def _get_paginated_timeseries( if resampling_interval: params["resampling_interval"] = resampling_interval - return self._timeseries_api_client.get_paginated( - "timeseries", timeseries_api_models.TimeseriesData, params=params + return self._platform_api_client.get_paginated( + "timeseries", platform_api_models.TimeseriesData, params=params ) def _get_timeseries( @@ -356,19 +349,19 @@ def _get_production_runs( machine: Optional[UUID] = None, start: Optional[datetime] = None, end: Optional[datetime] = None, - ) -> Iterator[ProductionRun]: + ) -> Iterator[platform_api_models.ProductionRun]: """Get production runs from the API.""" filters = { "production_order": production_order, "product": product, - "appliance": machine, + "machine": machine, "start": start.isoformat() if start else None, "end": end.isoformat() if end else None, } params = {k: v for k, v in filters.items() if v is not None} - return self._production_runs_api_client.get_paginated( - "production-runs", ProductionRun, params=params + return self._platform_api_client.get_paginated( + "production-runs", platform_api_models.ProductionRun, params=params ) def get_production_runs( diff --git a/src/enlyze/constants.py b/src/enlyze/constants.py index 1989dbe..9f956f5 100644 --- a/src/enlyze/constants.py +++ b/src/enlyze/constants.py @@ -1,11 +1,8 @@ #: Base URL of the ENLYZE platform. ENLYZE_BASE_URL = "https://app.enlyze.com" -#: URL sub-path where the Timeseries API is deployed on the ENLYZE platform. -TIMESERIES_API_SUB_PATH = "api/timeseries/v1/" - -#: URL sub-path where the Production Runs API is deployed on the ENLYZE platform. -PRODUCTION_RUNS_API_SUB_PATH = "api/production-runs/v1/" +#: URL sub-path of the ENLYZE platform API. +PLATFORM_API_SUB_PATH = "api/v2/" #: HTTP timeout for requests to the Timeseries API. #: diff --git a/src/enlyze/models.py b/src/enlyze/models.py index a16328d..ad93714 100644 --- a/src/enlyze/models.py +++ b/src/enlyze/models.py @@ -18,7 +18,7 @@ class Site: """ - _id: int + uuid: UUID #: Display name of the site. 
display_name: str @@ -205,7 +205,7 @@ def to_dataframe(self, use_display_names: bool = False) -> pandas.DataFrame: class OEEComponent: """Individual Overall Equipment Effectiveness (OEE) score - This is calculated by the ENLYZE Platform based on a combination of real machine + This is calculated by the ENLYZE platform based on a combination of real machine data and production order booking information provided by the customer. For more information, please check out https://www.oee.com diff --git a/tests/conftest.py b/tests/conftest.py index 2129782..00420d3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,5 @@ import os -from datetime import datetime, timezone +from datetime import datetime, timedelta, timezone import hypothesis import pytest @@ -31,7 +31,9 @@ ) datetime_before_today_strategy = st.datetimes( - max_value=datetime.now().replace(hour=0), + max_value=(datetime.now() - timedelta(days=1)).replace( + hour=23, minute=59, second=59 + ), min_value=datetime(1970, 1, 1, 12, 0, 0), timezones=st.just(timezone.utc), ) diff --git a/src/enlyze/api_clients/production_runs/__init__.py b/tests/enlyze/api_client/__init__.py similarity index 100% rename from src/enlyze/api_clients/production_runs/__init__.py rename to tests/enlyze/api_client/__init__.py diff --git a/tests/enlyze/api_client/test_client.py b/tests/enlyze/api_client/test_client.py new file mode 100644 index 0000000..0b4e045 --- /dev/null +++ b/tests/enlyze/api_client/test_client.py @@ -0,0 +1,281 @@ +import string +from unittest.mock import patch + +import httpx +import pytest +import respx +from hypothesis import HealthCheck, given, settings +from hypothesis import strategies as st + +from enlyze._version import VERSION +from enlyze.api_client.client import ( + USER_AGENT_NAME_VERSION_SEPARATOR, + PlatformApiClient, + PlatformApiModel, + _construct_user_agent, + _Metadata, + _PaginatedResponse, +) +from enlyze.constants import USER_AGENT +from enlyze.errors import EnlyzeError, InvalidTokenError + + +def _paginated_responses_to_expected_data( + model: PlatformApiModel, paginated_responses: list[_PaginatedResponse] +) -> list: + expected = [] + for r in paginated_responses: + data = r.data if isinstance(r.data, list) else [r.data] + validated = [model.model_validate(e) for e in data] + expected.extend(validated) + return expected + + +@pytest.fixture +def string_model(): + with patch( + "enlyze.api_client.models.PlatformApiModel.model_validate", + side_effect=lambda o: str(o), + ): + yield PlatformApiModel + + +@pytest.fixture +def base_url(): + return "http://api-client-base" + + +@pytest.fixture +def api_client(auth_token, base_url): + return PlatformApiClient(token=auth_token, base_url=base_url) + + +@pytest.fixture +def api_client_base_url(api_client): + return api_client._client.base_url + + +@pytest.fixture +def last_page_metadata(): + return _Metadata(next_cursor=None) + + +@pytest.fixture +def next_page_metadata(): + return _Metadata(next_cursor="100") + + +@pytest.fixture +def response_data_dict() -> dict: + return {"some": "dictionary"} + + +@pytest.fixture +def response_data_list(response_data_dict) -> list: + return [response_data_dict] + + +@pytest.fixture +def empty_paginated_response(last_page_metadata): + return _PaginatedResponse(data=[], metadata=last_page_metadata) + + +@pytest.fixture +def paginated_response_list_no_next_page(response_data_list, last_page_metadata): + return _PaginatedResponse(data=response_data_list, metadata=last_page_metadata) + + +@pytest.fixture +def 
paginated_response_dict_no_next_page(response_data_dict, last_page_metadata): + return _PaginatedResponse(data=response_data_dict, metadata=last_page_metadata) + + +@pytest.fixture +def paginated_response_list_with_next_page(response_data_list, next_page_metadata): + return _PaginatedResponse(data=response_data_list, metadata=next_page_metadata) + + +@pytest.fixture +def paginated_response_dict_with_next_page(response_data_dict, next_page_metadata): + return _PaginatedResponse(data=response_data_dict, metadata=next_page_metadata) + + +@pytest.fixture +def custom_user_agent(): + return "custom-user-agent" + + +@pytest.fixture +def custom_user_agent_version(): + return "3.4.5" + + +class TestConstructUserAgent: + def test__construct_user_agent_with_defaults(self): + ua, version = _construct_user_agent().split(USER_AGENT_NAME_VERSION_SEPARATOR) + assert ua == USER_AGENT + assert version == VERSION + + def test__construct_user_agent_custom_agent(self, custom_user_agent): + ua, version = _construct_user_agent(user_agent=custom_user_agent).split( + USER_AGENT_NAME_VERSION_SEPARATOR + ) + assert ua == custom_user_agent + assert version == VERSION + + def test__construct_user_agent_custom_version(self, custom_user_agent_version): + ua, version = _construct_user_agent(version=custom_user_agent_version).split( + USER_AGENT_NAME_VERSION_SEPARATOR + ) + assert ua == USER_AGENT + assert version == custom_user_agent_version + + def test__construct_user_agent_custom_agent_and_version( + self, custom_user_agent, custom_user_agent_version + ): + ua, version = _construct_user_agent( + user_agent=custom_user_agent, version=custom_user_agent_version + ).split(USER_AGENT_NAME_VERSION_SEPARATOR) + assert ua == custom_user_agent + assert version == custom_user_agent_version + + +@settings(suppress_health_check=[HealthCheck.function_scoped_fixture]) +@given( + token=st.text(string.printable, min_size=1), +) +@respx.mock +def test_token_auth(token, base_url): + route_is_authenticated = respx.get( + "", + headers__contains={"Authorization": f"Bearer {token}"}, + ).respond(json={}) + + api_client = PlatformApiClient(base_url=base_url, token=token) + api_client.get("") + assert route_is_authenticated.called + + +@respx.mock +def test_base_url(api_client, api_client_base_url): + endpoint = "some-endpoint" + + route = respx.get( + httpx.URL(api_client_base_url).join(endpoint), + ).respond(json={}) + + api_client.get(endpoint) + assert route.called + + +@respx.mock +def test_get_raises_cannot_read(api_client): + with pytest.raises(EnlyzeError, match="Couldn't read"): + respx.get("").mock(side_effect=Exception("oops")) + api_client.get("") + + +@respx.mock +def test_get_raises_on_error(api_client): + with pytest.raises(EnlyzeError, match="returned error 404"): + respx.get("").respond(404) + api_client.get("") + + +@respx.mock +def test_get_raises_invalid_token_error_not_authenticated(api_client): + with pytest.raises(InvalidTokenError): + respx.get("").respond(403) + api_client.get("") + + +@respx.mock +def test_get_raises_non_json(api_client): + with pytest.raises(EnlyzeError, match="didn't return a valid JSON object"): + respx.get("").respond(200, json=None) + api_client.get("") + + +@pytest.mark.parametrize( + "invalid_payload", + [ + "not a paginated response", + {"data": "something but not a list"}, + ], +) +@respx.mock +def test_get_paginated_raises_invalid_pagination_schema( + api_client, string_model, invalid_payload +): + with pytest.raises(EnlyzeError, match="Paginated response expected"): + 
respx.get("").respond(json=invalid_payload) + next(api_client.get_paginated("", string_model)) + + +@pytest.mark.parametrize( + "paginated_response_no_next_page_fixture", + ["paginated_response_list_no_next_page", "paginated_response_dict_no_next_page"], +) +@respx.mock +def test_get_paginated_single_page( + api_client, + string_model, + paginated_response_no_next_page_fixture, + request, +): + paginated_response_no_next_page = request.getfixturevalue( + paginated_response_no_next_page_fixture + ) + params = {"params": {"param1": "value1"}} + expected_data = _paginated_responses_to_expected_data( + string_model, [paginated_response_no_next_page] + ) + + route = respx.get("", params=params).respond( + 200, json=paginated_response_no_next_page.model_dump() + ) + + data = list(api_client.get_paginated("", string_model, params=params)) + + assert route.called + assert route.call_count == 1 + assert expected_data == data + + +@pytest.mark.parametrize( + "paginated_response_with_next_page_fixture,paginated_response_no_next_page_fixture", + [ + [ + "paginated_response_dict_with_next_page", + "paginated_response_dict_no_next_page", + ], + [ + "paginated_response_list_with_next_page", + "paginated_response_list_no_next_page", + ], + ], +) +@respx.mock +def test_get_paginated_multi_page( + api_client, + paginated_response_with_next_page_fixture, + paginated_response_no_next_page_fixture, + string_model, + request, +): + initial_params = {"irrelevant": "values"} + responses = [ + request.getfixturevalue(paginated_response_with_next_page_fixture), + request.getfixturevalue(paginated_response_no_next_page_fixture), + ] + + expected_data = _paginated_responses_to_expected_data(string_model, responses) + + route = respx.get("", params=initial_params) + route.side_effect = [httpx.Response(200, json=r.model_dump()) for r in responses] + + data = list(api_client.get_paginated("", PlatformApiModel, params=initial_params)) + + assert route.called + assert route.call_count == 2 + assert data == expected_data diff --git a/tests/enlyze/api_clients/timeseries/test_models.py b/tests/enlyze/api_client/test_models.py similarity index 98% rename from tests/enlyze/api_clients/timeseries/test_models.py rename to tests/enlyze/api_client/test_models.py index 7e6683c..68470bc 100644 --- a/tests/enlyze/api_clients/timeseries/test_models.py +++ b/tests/enlyze/api_client/test_models.py @@ -4,7 +4,7 @@ import pytest -from enlyze.api_clients.timeseries.models import TimeseriesData +from enlyze.api_client.models import TimeseriesData # We use this to skip columns that contain the timestamp assuming # it starts at the beginning of the sequence. 
We also use it diff --git a/tests/enlyze/api_clients/__init__.py b/tests/enlyze/api_clients/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/enlyze/api_clients/conftest.py b/tests/enlyze/api_clients/conftest.py deleted file mode 100644 index 1773257..0000000 --- a/tests/enlyze/api_clients/conftest.py +++ /dev/null @@ -1,24 +0,0 @@ -from unittest.mock import patch - -import pytest - -from enlyze.api_clients.base import ApiBaseModel - - -@pytest.fixture -def string_model(): - with patch( - "enlyze.api_clients.base.ApiBaseModel.model_validate", - side_effect=lambda o: str(o), - ): - yield ApiBaseModel - - -@pytest.fixture -def endpoint(): - return "https://my-endpoint.com" - - -@pytest.fixture -def base_url(): - return "http://api-client-base" diff --git a/tests/enlyze/api_clients/production_runs/__init__.py b/tests/enlyze/api_clients/production_runs/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/enlyze/api_clients/production_runs/test_client.py b/tests/enlyze/api_clients/production_runs/test_client.py deleted file mode 100644 index f64167d..0000000 --- a/tests/enlyze/api_clients/production_runs/test_client.py +++ /dev/null @@ -1,117 +0,0 @@ -import httpx -import pytest -import respx - -from enlyze.api_clients.production_runs.client import ( - ProductionRunsApiClient, - _Metadata, - _PaginatedResponse, -) -from enlyze.constants import PRODUCTION_RUNS_API_SUB_PATH - - -@pytest.fixture -def metadata_last_page(): - return _Metadata(has_more=False, next_cursor=None) - - -@pytest.fixture -def metadata_next_page(): - return _Metadata(has_more=True, next_cursor=1337) - - -@pytest.fixture -def response_data(): - return [{"id": i, "name": f"row-{i}"} for i in range(10)] - - -@pytest.fixture -def paginated_response_no_next_page(response_data, metadata_last_page): - return _PaginatedResponse(data=response_data, metadata=metadata_last_page) - - -@pytest.fixture -def paginated_response_with_next_page(response_data, metadata_next_page): - return _PaginatedResponse(data=response_data, metadata=metadata_next_page) - - -@pytest.fixture -def production_runs_client(auth_token, base_url): - return ProductionRunsApiClient(token=auth_token, base_url=base_url) - - -def test_timeseries_api_appends_sub_path(auth_token, base_url): - expected = str(httpx.URL(base_url).join(PRODUCTION_RUNS_API_SUB_PATH)) - client = ProductionRunsApiClient(token=auth_token, base_url=base_url) - assert client._full_url("") == expected - - -@pytest.mark.parametrize( - ("response_fixture", "expected_has_more"), - ( - ("paginated_response_no_next_page", False), - ("paginated_response_with_next_page", True), - ), -) -def test_has_more(request, response_fixture, expected_has_more, production_runs_client): - response = request.getfixturevalue(response_fixture) - assert production_runs_client._has_more(response) == expected_has_more - - -def test_next_page_call_args( - production_runs_client, endpoint, paginated_response_with_next_page -): - params = {"some": "param"} - kwargs = {"some": "kwarg"} - url = endpoint - next_url, next_params, next_kwargs = production_runs_client._next_page_call_args( - url=url, - params=params, - paginated_response=paginated_response_with_next_page, - **kwargs, - ) - assert next_url == url - assert next_params == { - **params, - "cursor": paginated_response_with_next_page.metadata.next_cursor, - } - assert next_kwargs == kwargs - - -@respx.mock -def test_timeseries_api_get_paginated_single_page( - production_runs_client, string_model, 
paginated_response_no_next_page -): - expected_data = [ - string_model.model_validate(e) for e in paginated_response_no_next_page.data - ] - respx.get("").respond(json=paginated_response_no_next_page.model_dump()) - assert list(production_runs_client.get_paginated("", string_model)) == expected_data - - -@respx.mock -def test_timeseries_api_get_paginated_multi_page( - production_runs_client, - string_model, - paginated_response_with_next_page, - paginated_response_no_next_page, -): - expected_data = [ - string_model.model_validate(e) - for e in [ - *paginated_response_no_next_page.data, - *paginated_response_with_next_page.data, - ] - ] - next_cursor = paginated_response_with_next_page.metadata.next_cursor - respx.get("", params=f"cursor={next_cursor}").respond( - 200, json=paginated_response_no_next_page.model_dump() - ) - respx.get("").mock( - side_effect=lambda request: httpx.Response( - 200, - json=paginated_response_with_next_page.model_dump(), - ) - ) - - assert list(production_runs_client.get_paginated("", string_model)) == expected_data diff --git a/tests/enlyze/api_clients/test_base.py b/tests/enlyze/api_clients/test_base.py deleted file mode 100644 index 897cd81..0000000 --- a/tests/enlyze/api_clients/test_base.py +++ /dev/null @@ -1,333 +0,0 @@ -import string -from unittest.mock import MagicMock, call, patch - -import httpx -import pytest -import respx -from hypothesis import HealthCheck, given, settings -from hypothesis import strategies as st - -from enlyze._version import VERSION -from enlyze.api_clients.base import ( - USER_AGENT_NAME_VERSION_SEPARATOR, - ApiBaseClient, - ApiBaseModel, - PaginatedResponseBaseModel, - _construct_user_agent, -) -from enlyze.constants import USER_AGENT -from enlyze.errors import EnlyzeError, InvalidTokenError - - -class Metadata(ApiBaseModel): - has_more: bool - next_cursor: int | None = None - - -class PaginatedResponseModel(PaginatedResponseBaseModel): - metadata: Metadata - data: list - - -def _transform_paginated_data_integers(data: list) -> list: - return [n * n for n in data] - - -@pytest.fixture -def last_page_metadata(): - return Metadata(has_more=False, next_cursor=None) - - -@pytest.fixture -def next_page_metadata(): - return Metadata(has_more=True, next_cursor=100) - - -@pytest.fixture -def empty_paginated_response(last_page_metadata): - return PaginatedResponseModel(data=[], metadata=last_page_metadata) - - -@pytest.fixture -def response_data_integers(): - return list(range(20)) - - -@pytest.fixture -def paginated_response_with_next_page(response_data_integers, next_page_metadata): - return PaginatedResponseModel( - data=response_data_integers, metadata=next_page_metadata - ) - - -@pytest.fixture -def paginated_response_no_next_page(response_data_integers, last_page_metadata): - return PaginatedResponseModel( - data=response_data_integers, metadata=last_page_metadata - ) - - -@pytest.fixture -def base_client(auth_token, string_model, base_url): - mock_has_more = MagicMock() - mock_transform_paginated_response_data = MagicMock(side_effect=lambda e: e) - mock_next_page_call_args = MagicMock() - with patch.multiple( - ApiBaseClient, - __abstractmethods__=set(), - _has_more=mock_has_more, - _next_page_call_args=mock_next_page_call_args, - _transform_paginated_response_data=mock_transform_paginated_response_data, - ): - client = ApiBaseClient[PaginatedResponseModel]( - token=auth_token, - base_url=base_url, - ) - client.PaginatedResponseModel = PaginatedResponseModel - yield client - - -@pytest.fixture -def custom_user_agent(): - return 
"custom-user-agent" - - -@pytest.fixture -def custom_user_agent_version(): - return "3.4.5" - - -class TestConstructUserAgent: - def test__construct_user_agent_with_defaults(self): - ua, version = _construct_user_agent().split(USER_AGENT_NAME_VERSION_SEPARATOR) - assert ua == USER_AGENT - assert version == VERSION - - def test__construct_user_agent_custom_agent(self, custom_user_agent): - ua, version = _construct_user_agent(user_agent=custom_user_agent).split( - USER_AGENT_NAME_VERSION_SEPARATOR - ) - assert ua == custom_user_agent - assert version == VERSION - - def test__construct_user_agent_custom_version(self, custom_user_agent_version): - ua, version = _construct_user_agent(version=custom_user_agent_version).split( - USER_AGENT_NAME_VERSION_SEPARATOR - ) - assert ua == USER_AGENT - assert version == custom_user_agent_version - - def test__construct_user_agent_custom_agent_and_version( - self, custom_user_agent, custom_user_agent_version - ): - ua, version = _construct_user_agent( - user_agent=custom_user_agent, version=custom_user_agent_version - ).split(USER_AGENT_NAME_VERSION_SEPARATOR) - assert ua == custom_user_agent - assert version == custom_user_agent_version - - -@settings(suppress_health_check=[HealthCheck.function_scoped_fixture]) -@given( - token=st.text(string.printable, min_size=1), -) -@respx.mock -def test_token_auth(token, base_url): - with patch.multiple(ApiBaseClient, __abstractmethods__=set()): - client = ApiBaseClient(token=token, base_url=base_url) - - route_is_authenticated = respx.get( - "", - headers__contains={"Authorization": f"Token {token}"}, - ).respond(json={}) - - client.get("") - assert route_is_authenticated.called - - -@respx.mock -def test_base_url(base_client, base_url): - endpoint = "some-endpoint" - - route = respx.get( - httpx.URL(base_url).join(endpoint), - ).respond(json={}) - - base_client.get(endpoint) - assert route.called - - -@respx.mock -def test_get_raises_cannot_read(base_client): - with pytest.raises(EnlyzeError, match="Couldn't read"): - respx.get("").mock(side_effect=Exception("oops")) - base_client.get("") - - -@respx.mock -def test_get_raises_on_error(base_client): - with pytest.raises(EnlyzeError, match="returned error 404"): - respx.get("").respond(404) - base_client.get("") - - -@respx.mock -def test_get_raises_invalid_token_error_not_authenticated(base_client): - with pytest.raises(InvalidTokenError): - respx.get("").respond(403) - base_client.get("") - - -@respx.mock -def test_get_raises_non_json(base_client): - with pytest.raises(EnlyzeError, match="didn't return a valid JSON object"): - respx.get("").respond(200, json=None) - base_client.get("") - - -@respx.mock -def test_get_paginated_single_page( - base_client, string_model, paginated_response_no_next_page -): - endpoint = "https://irrelevant-url.com" - params = {"params": {"param1": "value1"}} - expected_data = [ - string_model.model_validate(e) for e in paginated_response_no_next_page.data - ] - - mock_has_more = base_client._has_more - mock_has_more.return_value = False - route = respx.get(endpoint, params=params).respond( - 200, json=paginated_response_no_next_page.model_dump() - ) - - data = list(base_client.get_paginated(endpoint, ApiBaseModel, params=params)) - - assert route.called - assert route.call_count == 1 - assert expected_data == data - mock_has_more.assert_called_once_with(paginated_response_no_next_page) - - -@respx.mock -def test_get_paginated_multi_page( - base_client, - paginated_response_with_next_page, - paginated_response_no_next_page, - 
string_model, -): - endpoint = "https://irrelevant-url.com" - initial_params = {"irrelevant": "values"} - expected_data = [ - string_model.model_validate(e) - for e in [ - *paginated_response_with_next_page.data, - *paginated_response_no_next_page.data, - ] - ] - - mock_has_more = base_client._has_more - mock_has_more.side_effect = [True, False] - - mock_next_page_call_args = base_client._next_page_call_args - mock_next_page_call_args.return_value = (endpoint, {}, {}) - - route = respx.get(endpoint) - route.side_effect = [ - httpx.Response(200, json=paginated_response_with_next_page.model_dump()), - httpx.Response(200, json=paginated_response_no_next_page.model_dump()), - ] - - data = list( - base_client.get_paginated(endpoint, ApiBaseModel, params=initial_params) - ) - - assert route.called - assert route.call_count == 2 - assert data == expected_data - mock_has_more.assert_has_calls( - [ - call(paginated_response_with_next_page), - call(paginated_response_no_next_page), - ] - ) - mock_next_page_call_args.assert_called_once_with( - url=endpoint, - params=initial_params, - paginated_response=paginated_response_with_next_page, - ) - - -@pytest.mark.parametrize( - "invalid_payload", - [ - "not a paginated response", - {"data": "something but not a list"}, - ], -) -@respx.mock -def test_get_paginated_raises_invalid_pagination_schema( - base_client, - invalid_payload, -): - with pytest.raises(EnlyzeError, match="Paginated response expected"): - respx.get("").respond(json=invalid_payload) - next( - base_client.get_paginated( - "", - ApiBaseModel, - ) - ) - - -@respx.mock -def test_get_paginated_raises_enlyze_error( - base_client, string_model, paginated_response_no_next_page -): - # most straightforward way to raise a pydantic.ValidationError - # https://github.com/pydantic/pydantic/discussions/6459 - string_model.model_validate.side_effect = lambda _: Metadata() - respx.get("").respond(200, json=paginated_response_no_next_page.model_dump()) - - with pytest.raises(EnlyzeError, match="ENLYZE platform API returned an unparsable"): - next(base_client.get_paginated("", string_model)) - - -@respx.mock -def test_get_paginated_transform_paginated_data( - base_client, paginated_response_no_next_page, string_model -): - base_client._has_more.return_value = False - base_client._transform_paginated_response_data.side_effect = ( - _transform_paginated_data_integers - ) - expected_data = [ - string_model.model_validate(e) - for e in _transform_paginated_data_integers( - paginated_response_no_next_page.data - ) - ] - - route = respx.get("").respond( - 200, json=paginated_response_no_next_page.model_dump() - ) - - data = list(base_client.get_paginated("", ApiBaseModel)) - - base_client._transform_paginated_response_data.assert_called_once_with( - paginated_response_no_next_page.data - ) - - assert route.called - assert route.call_count == 1 - assert data == expected_data - - -def test_transform_paginated_data_returns_unmutated_element_by_default( - auth_token, base_url -): - with patch.multiple(ApiBaseClient, __abstractmethods__=set()): - client = ApiBaseClient(token=auth_token, base_url=base_url) - data = [1, 2, 3] - value = client._transform_paginated_response_data(data) - assert data == value diff --git a/tests/enlyze/api_clients/timeseries/__init__.py b/tests/enlyze/api_clients/timeseries/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/enlyze/api_clients/timeseries/test_client.py b/tests/enlyze/api_clients/timeseries/test_client.py deleted file mode 100644 index 
035b5dc..0000000 --- a/tests/enlyze/api_clients/timeseries/test_client.py +++ /dev/null @@ -1,108 +0,0 @@ -import httpx -import pytest -import respx - -from enlyze.api_clients.timeseries.client import TimeseriesApiClient, _PaginatedResponse -from enlyze.constants import TIMESERIES_API_SUB_PATH - - -@pytest.fixture -def response_data_list() -> list: - return [1, 2, 3] - - -@pytest.fixture -def response_data_dict() -> dict: - return {"some": "dictionary"} - - -@pytest.fixture -def transformed_data_dict(response_data_dict) -> list[dict]: - return [response_data_dict] - - -@pytest.fixture -def paginated_response_no_next_page(): - return _PaginatedResponse(data=[], next=None) - - -@pytest.fixture -def paginated_response_with_next_page(endpoint): - return _PaginatedResponse( - data=[], - next=f"{endpoint}?offset=1337", - ) - - -@pytest.fixture -def timeseries_client(auth_token, base_url): - return TimeseriesApiClient(token=auth_token, base_url=base_url) - - -def test_timeseries_api_appends_sub_path(auth_token, base_url): - expected = str(httpx.URL(base_url).join(TIMESERIES_API_SUB_PATH)) - client = TimeseriesApiClient(token=auth_token, base_url=base_url) - assert client._full_url("") == expected - - -@pytest.mark.parametrize( - ("response_fixture", "expected_has_more"), - ( - ("paginated_response_no_next_page", False), - ("paginated_response_with_next_page", True), - ), -) -def test_has_more(request, response_fixture, expected_has_more, timeseries_client): - response = request.getfixturevalue(response_fixture) - assert timeseries_client._has_more(response) == expected_has_more - - -@pytest.mark.parametrize( - ("data_fixture", "expected_fixture"), - ( - ("response_data_list", "response_data_list"), - ("response_data_dict", "transformed_data_dict"), - ), -) -def test_get_paginated_transform_paginated_data( - request, timeseries_client, data_fixture, expected_fixture -): - data = request.getfixturevalue(data_fixture) - expected = request.getfixturevalue(expected_fixture) - assert timeseries_client._transform_paginated_response_data(data) == expected - - -def test_next_page_call_args( - timeseries_client, endpoint, paginated_response_with_next_page -): - params = {"some": "param"} - kwargs = {"some": "kwarg"} - url = endpoint - next_url, next_params, next_kwargs = timeseries_client._next_page_call_args( - url=url, - params=params, - paginated_response=paginated_response_with_next_page, - **kwargs, - ) - assert next_url == str(paginated_response_with_next_page.next) - assert next_params == params - assert next_kwargs == kwargs - - -@respx.mock -def test_timeseries_api_get_paginated_single_page(timeseries_client, string_model): - respx.get("").respond(json={"data": ["a", "b"], "next": None}) - assert list(timeseries_client.get_paginated("", string_model)) == ["a", "b"] - - -@respx.mock -def test_timeseries_api_get_paginated_multi_page(timeseries_client, string_model): - respx.get("", params="offset=1").respond(json={"data": ["z"], "next": None}) - respx.get("").mock( - side_effect=lambda request: httpx.Response( - 200, - json={"data": ["x", "y"], "next": str(request.url.join("?offset=1"))}, - ) - ) - - assert list(timeseries_client.get_paginated("", string_model)) == ["x", "y", "z"] diff --git a/tests/enlyze/test_auth.py b/tests/enlyze/test_auth.py index a16965e..c493415 100644 --- a/tests/enlyze/test_auth.py +++ b/tests/enlyze/test_auth.py @@ -17,7 +17,7 @@ def test_token_auth(token): response = httpx.get("https://foo.bar/", auth=auth) assert my_route.called - assert 
response.request.headers["Authorization"] == f"Token {token}" + assert response.request.headers["Authorization"] == f"Bearer {token}" @pytest.mark.parametrize("invalid_token", {"", None, 0}) diff --git a/tests/enlyze/test_client.py b/tests/enlyze/test_client.py index 9bfe547..e4eb6c4 100644 --- a/tests/enlyze/test_client.py +++ b/tests/enlyze/test_client.py @@ -8,24 +8,14 @@ from hypothesis import HealthCheck, given, settings from hypothesis import strategies as st -import enlyze.api_clients.production_runs.models as production_runs_api_models -import enlyze.api_clients.timeseries.models as timeseries_api_models +import enlyze.api_client.models as platform_api_models import enlyze.models as user_models -from enlyze.api_clients.production_runs.client import ( - _Metadata as _ProductionRunsApiResponseMetadata, -) -from enlyze.api_clients.production_runs.client import ( - _PaginatedResponse as _PaginatedProductionRunsResponse, -) -from enlyze.api_clients.timeseries.client import ( - _PaginatedResponse as _PaginatedTimeseriesResponse, -) +from enlyze.api_client.client import _Metadata, _PaginatedResponse from enlyze.client import EnlyzeClient from enlyze.constants import ( ENLYZE_BASE_URL, MAXIMUM_NUMBER_OF_VARIABLES_PER_TIMESERIES_REQUEST, - PRODUCTION_RUNS_API_SUB_PATH, - TIMESERIES_API_SUB_PATH, + PLATFORM_API_SUB_PATH, ) from enlyze.errors import EnlyzeError, ResamplingValidationError from tests.conftest import ( @@ -38,35 +28,34 @@ MACHINE_UUID = "ebef7e5a-5921-4cf3-9a52-7ff0e98e8306" PRODUCT_CODE = "product-code" PRODUCTION_ORDER = "production-order" -SITE_ID = 1 +SITE_UUID_ONE = "4e655719-03e8-465e-9e24-db42c2d6735a" +SITE_UUID_TWO = "088da69d-356a-41f8-819e-04c38592f0ac" create_float_strategy = partial( st.floats, allow_nan=False, allow_infinity=False, allow_subnormal=False ) oee_score_strategy = st.builds( - production_runs_api_models.OEEComponent, + platform_api_models.OEEComponent, score=create_float_strategy(min_value=0, max_value=1.0), time_loss=st.just(10), ) quantity_strategy = st.builds( - production_runs_api_models.Quantity, + platform_api_models.Quantity, value=create_float_strategy(min_value=0, max_value=1.0), ) production_runs_strategy = st.lists( st.builds( - production_runs_api_models.ProductionRun, + platform_api_models.ProductionRun, uuid=st.uuids(), start=datetime_before_today_strategy, end=datetime_today_until_now_strategy, - appliance=st.builds( - production_runs_api_models.Machine, uuid=st.just(MACHINE_UUID) - ), + machine=st.builds(platform_api_models.Machine, uuid=st.just(MACHINE_UUID)), product=st.builds( - production_runs_api_models.Product, + platform_api_models.Product, code=st.just(PRODUCT_CODE), ), production_order=st.just(PRODUCTION_ORDER), @@ -93,23 +82,13 @@ def end_datetime(): return datetime.now() -class PaginatedTimeseriesApiResponse(httpx.Response): - def __init__(self, data, next=None) -> None: - super().__init__( - status_code=HTTPStatus.OK, - text=_PaginatedTimeseriesResponse(data=data, next=next).model_dump_json(), - headers=MOCK_RESPONSE_HEADERS, - ) - - -class PaginatedProductionRunsApiResponse(httpx.Response): - def __init__(self, data, has_more=False, next_cursor=None) -> None: +class PaginatedPlatformApiResponse(httpx.Response): + def __init__(self, data: list | dict, next_cursor=None) -> None: super().__init__( status_code=HTTPStatus.OK, - text=_PaginatedProductionRunsResponse( + text=_PaginatedResponse( data=data, - metadata=_ProductionRunsApiResponseMetadata( - has_more=has_more, + metadata=_Metadata( next_cursor=next_cursor, ), 
).model_dump_json(), @@ -127,33 +106,39 @@ def make_client(): @given( - site1=st.builds(timeseries_api_models.Site), - site2=st.builds(timeseries_api_models.Site), + site1=st.builds(platform_api_models.Site), + site2=st.builds(platform_api_models.Site), ) def test_get_sites(site1, site2): client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("sites").mock(PaginatedTimeseriesApiResponse(data=[site1, site2])) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("sites").mock( + PaginatedPlatformApiResponse(data=[s.model_dump() for s in [site1, site2]]) + ) sites = client.get_sites() assert sites == [site1.to_user_model(), site2.to_user_model()] @given( - site1=st.builds(timeseries_api_models.Site, id=st.just(1)), - site2=st.builds(timeseries_api_models.Site, id=st.just(2)), - machine1=st.builds(timeseries_api_models.Machine, site=st.just(1)), - machine2=st.builds(timeseries_api_models.Machine, site=st.just(2)), + site1=st.builds(platform_api_models.Site, uuid=st.just(SITE_UUID_ONE)), + site2=st.builds(platform_api_models.Site, uuid=st.just(SITE_UUID_TWO)), + machine1=st.builds(platform_api_models.Machine, site=st.just(SITE_UUID_ONE)), + machine2=st.builds(platform_api_models.Machine, site=st.just(SITE_UUID_TWO)), ) def test_get_machines(site1, site2, machine1, machine2): client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("appliances").mock( - PaginatedTimeseriesApiResponse(data=[machine1, machine2]) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("machines").mock( + PaginatedPlatformApiResponse( + data=[m.model_dump() for m in [machine1, machine2]] + ) + ) + mock.get("sites").mock( + PaginatedPlatformApiResponse(data=[s.model_dump() for s in [site1, site2]]) ) - mock.get("sites").mock(PaginatedTimeseriesApiResponse(data=[site1, site2])) all_machines = client.get_machines() assert all_machines == [ @@ -168,28 +153,32 @@ def test_get_machines(site1, site2, machine1, machine2): @given( - machine=st.builds(timeseries_api_models.Machine), + machine=st.builds(platform_api_models.Machine), ) def test_get_machines_site_not_found(machine): client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("sites").mock(PaginatedTimeseriesApiResponse(data=[])) - mock.get("appliances").mock(PaginatedTimeseriesApiResponse(data=[machine])) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("sites").mock(PaginatedPlatformApiResponse(data=[])) + mock.get("machines").mock( + PaginatedPlatformApiResponse(data=[machine.model_dump()]) + ) assert client.get_machines() == [] @given( machine=st.builds(user_models.Machine), - var1=st.builds(timeseries_api_models.Variable), - var2=st.builds(timeseries_api_models.Variable), + var1=st.builds(platform_api_models.Variable), + var2=st.builds(platform_api_models.Variable), ) def test_get_variables(machine, var1, var2): client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("variables").mock(PaginatedTimeseriesApiResponse(data=[var1, var2])) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("variables").mock( + PaginatedPlatformApiResponse(data=[v.model_dump() for v in [var1, var2]]) + ) variables = client.get_variables(machine) assert variables == [ @@ -237,10 +226,11 @@ def test_get_timeseries( client = make_client() variable = data.draw(variable_strategy) - with 
respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("timeseries", params="offset=1").mock( - PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + cursor = "next-1" + mock.get("timeseries", params=f"cursor={cursor}").mock( + PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=["time", str(variable.uuid)], records=records[1:], ).model_dump() @@ -248,12 +238,12 @@ def test_get_timeseries( ) mock.get("timeseries").mock( - side_effect=lambda request: PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + side_effect=lambda request: PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=["time", str(variable.uuid)], records=records[:1], ).model_dump(), - next=str(request.url.join("?offset=1")), + next_cursor=cursor, ) ) if timeseries_call == "without_resampling": @@ -286,7 +276,7 @@ def test_get_timeseries( "data", [ {}, - timeseries_api_models.TimeseriesData(columns=[], records=[]).model_dump(), + platform_api_models.TimeseriesData(columns=[], records=[]).model_dump(), ], ) @pytest.mark.parametrize( @@ -320,8 +310,8 @@ def test_get_timeseries_returns_none_on_empty_response( variable = data_strategy.draw(variable_strategy) client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: - mock.get("timeseries").mock(PaginatedTimeseriesApiResponse(data=data)) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("timeseries").mock(PaginatedPlatformApiResponse(data=data)) if timeseries_call == "without_resampling": assert ( client.get_timeseries(start_datetime, end_datetime, [variable]) is None @@ -346,7 +336,7 @@ def test_get_timeseries_returns_none_on_empty_response( min_size=2, max_size=5, ), - machine=st.builds(timeseries_api_models.Machine, uuid=st.just(MACHINE_UUID)), + machine=st.builds(platform_api_models.Machine, uuid=st.just(MACHINE_UUID)), ) @settings(suppress_health_check=[HealthCheck.function_scoped_fixture]) def test__get_timeseries_raises_on_mixed_response( @@ -373,11 +363,11 @@ def test__get_timeseries_raises_on_mixed_response( ) ) - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: mock.get("timeseries").mock( side_effect=[ - PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=[ "time", *[ @@ -390,8 +380,8 @@ def test__get_timeseries_raises_on_mixed_response( records=records, ).model_dump(), ), - PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=[], records=[], ).model_dump(), @@ -446,10 +436,10 @@ def test_get_timeseries_raises_api_returned_no_timestamps( ): client = make_client() - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: mock.get("timeseries").mock( - PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=["something but not time"], records=[], ).model_dump() @@ -498,7 +488,7 @@ def test__get_timeseries_raises_on_chunk_value_error( variable=st.builds( user_models.Variable, data_type=st.just("INTEGER"), - machine=st.builds(timeseries_api_models.Machine), + 
machine=st.builds(platform_api_models.Machine), ), records=st.lists( st.tuples( @@ -519,10 +509,10 @@ def f(*args, **kwargs): monkeypatch.setattr("enlyze.client.reduce", f) - with respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as mock: + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: mock.get("timeseries").mock( - PaginatedTimeseriesApiResponse( - data=timeseries_api_models.TimeseriesData( + PaginatedPlatformApiResponse( + data=platform_api_models.TimeseriesData( columns=["time", str(variable.uuid)], records=records, ).model_dump() @@ -539,11 +529,11 @@ def f(*args, **kwargs): st.text(), ), machine=st.builds( - timeseries_api_models.Machine, - site=st.just(SITE_ID), + platform_api_models.Machine, + site=st.just(SITE_UUID_ONE), uuid=st.just(MACHINE_UUID), ), - site=st.builds(timeseries_api_models.Site, id=st.just(SITE_ID)), + site=st.builds(platform_api_models.Site, uuid=st.just(SITE_UUID_ONE)), start=st.one_of(datetime_before_today_strategy, st.none()), end=st.one_of(datetime_today_until_now_strategy, st.none()), production_runs=production_runs_strategy, @@ -563,20 +553,13 @@ def test_get_production_runs( machine_user_model = machine.to_user_model(site_user_model) machines_by_uuid = {machine.uuid: machine_user_model} - with ( - respx_mock_with_base_url(TIMESERIES_API_SUB_PATH) as timeseries_api_mock, - respx_mock_with_base_url( - PRODUCTION_RUNS_API_SUB_PATH - ) as production_runs_api_mock, - ): - timeseries_api_mock.get("appliances").mock( - PaginatedTimeseriesApiResponse(data=[machine]) - ) - timeseries_api_mock.get("sites").mock( - PaginatedTimeseriesApiResponse(data=[site]) + with respx_mock_with_base_url(PLATFORM_API_SUB_PATH) as mock: + mock.get("machines").mock( + PaginatedPlatformApiResponse(data=[machine.model_dump()]) ) - production_runs_api_mock.get("production-runs").mock( - PaginatedProductionRunsApiResponse( + mock.get("sites").mock(PaginatedPlatformApiResponse(data=[site.model_dump()])) + mock.get("production-runs").mock( + PaginatedPlatformApiResponse( data=[p.model_dump(by_alias=True) for p in production_runs] ) )