From 41274597fef0414408577b7901839da046a4eb6d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alex=20Gr=C3=B6nholm?=
Date: Mon, 29 Jul 2024 15:39:54 +0300
Subject: [PATCH] Improved the repr() output of schedulers, data stores and
 event brokers

---
 docs/versionhistory.rst                  |  2 ++
 src/apscheduler/_schedulers/async_.py    |  7 +++++--
 src/apscheduler/_schedulers/sync.py      |  7 +++++--
 src/apscheduler/_utils.py                | 15 ++++++++++++++
 src/apscheduler/datastores/memory.py     |  6 +++++-
 src/apscheduler/datastores/mongodb.py    |  7 ++++++-
 src/apscheduler/datastores/sqlalchemy.py |  6 +++++-
 src/apscheduler/eventbrokers/asyncpg.py  |  6 +++++-
 src/apscheduler/eventbrokers/local.py    |  6 +++++-
 src/apscheduler/eventbrokers/mqtt.py     |  6 +++++-
 src/apscheduler/eventbrokers/psycopg.py  |  6 +++++-
 src/apscheduler/eventbrokers/redis.py    |  6 +++++-
 tests/test_datastores.py                 | 25 ++++++++++++++++++++++++
 tests/test_schedulers.py                 | 14 +++++++++++++
 14 files changed, 107 insertions(+), 12 deletions(-)

diff --git a/docs/versionhistory.rst b/docs/versionhistory.rst
index 687303334..5eb43aa69 100644
--- a/docs/versionhistory.rst
+++ b/docs/versionhistory.rst
@@ -56,6 +56,8 @@ APScheduler, see the :doc:`migration section <migration>`.
   synchronous engine
 - Fixed serializers raising their own exceptions instead of ``SerializationError``
   and ``DeserializationError`` as appropriate
+- Fixed ``repr()`` outputs of schedulers, data stores and event brokers to be much more
+  useful and reasonable
 
 **4.0.0a5**
 
diff --git a/src/apscheduler/_schedulers/async_.py b/src/apscheduler/_schedulers/async_.py
index 43fe30b1c..9ecd9020e 100644
--- a/src/apscheduler/_schedulers/async_.py
+++ b/src/apscheduler/_schedulers/async_.py
@@ -59,7 +59,7 @@
     Task,
     TaskDefaults,
 )
-from .._utils import UnsetValue, merge_metadata, unset
+from .._utils import UnsetValue, create_repr, merge_metadata, unset
 from .._validators import non_negative_number
 from ..abc import DataStore, EventBroker, JobExecutor, Subscription, Trigger
 from ..datastores.memory import MemoryDataStore
@@ -85,7 +85,7 @@
 T = TypeVar("T")
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class AsyncScheduler:
     """
     An asynchronous (AnyIO based) scheduler implementation.
@@ -194,6 +194,9 @@ async def __aexit__(
         await self.stop()
         await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)
 
+    def __repr__(self) -> str:
+        return create_repr(self, "identity", "role", "data_store", "event_broker")
+
     async def _ensure_services_initialized(self, exit_stack: AsyncExitStack) -> None:
         """
         Initialize the data store and event broker if this hasn't already been done.
diff --git a/src/apscheduler/_schedulers/sync.py b/src/apscheduler/_schedulers/sync.py
index 6c75fe3b7..755335c63 100644
--- a/src/apscheduler/_schedulers/sync.py
+++ b/src/apscheduler/_schedulers/sync.py
@@ -19,7 +19,7 @@
 from .._enums import CoalescePolicy, ConflictPolicy, RunState, SchedulerRole
 from .._events import Event, T_Event
 from .._structures import Job, JobResult, MetadataType, Schedule, Task, TaskDefaults
-from .._utils import UnsetValue, unset
+from .._utils import UnsetValue, create_repr, unset
 from ..abc import DataStore, EventBroker, JobExecutor, Subscription, Trigger
 from .async_ import AsyncScheduler, TaskType
 
@@ -29,7 +29,7 @@
     from typing_extensions import Self
 
 
-@attrs.define(init=False)
+@attrs.define(init=False, repr=False)
 class Scheduler:
     """
     A synchronous wrapper for :class:`AsyncScheduler`.
@@ -171,6 +171,9 @@ def _ensure_services_ready(
 
             return self._portal
 
+    def __repr__(self) -> str:
+        return create_repr(self, "identity", "role", "data_store", "event_broker")
+
     def cleanup(self) -> None:
         portal = self._ensure_services_ready()
         return portal.call(self._async_scheduler.cleanup)
diff --git a/src/apscheduler/_utils.py b/src/apscheduler/_utils.py
index 467b8f93f..9064eda8e 100644
--- a/src/apscheduler/_utils.py
+++ b/src/apscheduler/_utils.py
@@ -89,3 +89,18 @@ def merge_metadata(
         new_metadata.update(metadata)
 
     return new_metadata
+
+
+def create_repr(instance: object, *attrnames: str, **kwargs) -> str:
+    kv_pairs: list[tuple[str, object]] = []
+    for attrname in attrnames:
+        value = getattr(instance, attrname)
+        if value is not unset and value is not None:
+            kv_pairs.append((attrname, value))
+
+    for key, value in kwargs.items():
+        if value is not unset and value is not None:
+            kv_pairs.append((key, value))
+
+    rendered_attrs = ", ".join(f"{key}={value!r}" for key, value in kv_pairs)
+    return f"{instance.__class__.__name__}({rendered_attrs})"
diff --git a/src/apscheduler/datastores/memory.py b/src/apscheduler/datastores/memory.py
index 72a603819..ae575133f 100644
--- a/src/apscheduler/datastores/memory.py
+++ b/src/apscheduler/datastores/memory.py
@@ -25,12 +25,13 @@
 )
 from .._exceptions import ConflictingIdError, TaskLookupError
 from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
+from .._utils import create_repr
 from .base import BaseDataStore
 
 max_datetime = datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzinfo=timezone.utc)
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class MemoryDataStore(BaseDataStore):
     """
     Stores scheduler data in memory, without serializing it.
@@ -49,6 +50,9 @@ class MemoryDataStore(BaseDataStore):
     _jobs_by_schedule_id: dict[str, set[Job]] = attrs.Factory(partial(defaultdict, set))
     _job_results: dict[UUID, JobResult] = attrs.Factory(dict)
 
+    def __repr__(self) -> str:
+        return create_repr(self)
+
     def _find_schedule_index(self, schedule: Schedule) -> int:
         left_index = bisect_left(self._schedules, schedule)
         right_index = bisect_right(self._schedules, schedule)
diff --git a/src/apscheduler/datastores/mongodb.py b/src/apscheduler/datastores/mongodb.py
index 00a77e640..44e2b9019 100644
--- a/src/apscheduler/datastores/mongodb.py
+++ b/src/apscheduler/datastores/mongodb.py
@@ -42,6 +42,7 @@
     TaskLookupError,
 )
 from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
+from .._utils import create_repr
 from ..abc import EventBroker
 from .base import BaseExternalDataStore
 
@@ -123,7 +124,7 @@ async def create(cls, func: Callable[..., Cursor[T]]) -> AsyncCursor[T]:
         return AsyncCursor(cursor)
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class MongoDBDataStore(BaseExternalDataStore):
     """
     Uses a MongoDB server to store data.
@@ -194,6 +195,10 @@ def __attrs_post_init__(self) -> None:
         self._jobs = database["jobs"]
         self._jobs_results = database["job_results"]
 
+    def __repr__(self) -> str:
+        server_descriptions = self._client.topology_description.server_descriptions()
+        return create_repr(self, host=list(server_descriptions))
+
     def _initialize(self) -> None:
         with self._client.start_session() as session:
             if self.start_from_scratch:
diff --git a/src/apscheduler/datastores/sqlalchemy.py b/src/apscheduler/datastores/sqlalchemy.py
index 40b8f4304..91070220c 100644
--- a/src/apscheduler/datastores/sqlalchemy.py
+++ b/src/apscheduler/datastores/sqlalchemy.py
@@ -76,6 +76,7 @@
     TaskLookupError,
 )
 from .._structures import Job, JobResult, Schedule, ScheduleResult, Task
+from .._utils import create_repr
 from ..abc import EventBroker
 from .base import BaseExternalDataStore
 
@@ -121,7 +122,7 @@ class _JobDiscard:
     exception: Exception | None = None
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class SQLAlchemyDataStore(BaseExternalDataStore):
     """
     Uses a relational database to store data.
@@ -189,6 +190,9 @@ def __attrs_post_init__(self) -> None:
         self._t_jobs = self._metadata.tables[prefix + "jobs"]
         self._t_job_results = self._metadata.tables[prefix + "job_results"]
 
+    def __repr__(self) -> str:
+        return create_repr(self, url=repr(self._engine.url), schema=self.schema)
+
     def _retry(self) -> tenacity.AsyncRetrying:
         def after_attempt(retry_state: tenacity.RetryCallState) -> None:
             self._logger.warning(
diff --git a/src/apscheduler/eventbrokers/asyncpg.py b/src/apscheduler/eventbrokers/asyncpg.py
index 6c8f1eb11..a4e241005 100644
--- a/src/apscheduler/eventbrokers/asyncpg.py
+++ b/src/apscheduler/eventbrokers/asyncpg.py
@@ -19,13 +19,14 @@
 
 from .._events import Event
 from .._exceptions import SerializationError
+from .._utils import create_repr
 from .base import BaseExternalEventBroker
 
 if TYPE_CHECKING:
     from sqlalchemy.ext.asyncio import AsyncEngine
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class AsyncpgEventBroker(BaseExternalEventBroker):
     """
     An asynchronous, asyncpg_ based event broker that uses a PostgreSQL server to
@@ -80,6 +81,9 @@ def from_async_sqla_engine(
         dsn = engine.url.render_as_string(hide_password=False).replace("+asyncpg", "")
         return cls(dsn, options or {}, **kwargs)
 
+    def __repr__(self) -> str:
+        return create_repr(self, "dsn")
+
     @property
     def _temporary_failure_exceptions(self) -> tuple[type[Exception], ...]:
         return OSError, InterfaceError
diff --git a/src/apscheduler/eventbrokers/local.py b/src/apscheduler/eventbrokers/local.py
index 27a3cfd9b..4101db451 100644
--- a/src/apscheduler/eventbrokers/local.py
+++ b/src/apscheduler/eventbrokers/local.py
@@ -3,10 +3,11 @@
 import attrs
 
 from .._events import Event
+from .._utils import create_repr
 from .base import BaseEventBroker
 
 
-@attrs.define(eq=False)
+@attrs.define(eq=False, repr=False)
 class LocalEventBroker(BaseEventBroker):
     """
     Asynchronous, local event broker.
@@ -17,5 +18,8 @@ class LocalEventBroker(BaseEventBroker):
     Does not serialize events.
""" + def __repr__(self) -> str: + return create_repr(self) + async def publish(self, event: Event) -> None: await self.publish_local(event) diff --git a/src/apscheduler/eventbrokers/mqtt.py b/src/apscheduler/eventbrokers/mqtt.py index 3f07a7949..9771ae7b1 100644 --- a/src/apscheduler/eventbrokers/mqtt.py +++ b/src/apscheduler/eventbrokers/mqtt.py @@ -15,12 +15,13 @@ from paho.mqtt.enums import CallbackAPIVersion from .._events import Event +from .._utils import create_repr from .base import BaseExternalEventBroker ALLOWED_TRANSPORTS = ("mqtt", "mqtts", "ws", "wss", "unix") -@attrs.define(eq=False) +@attrs.define(eq=False, repr=False) class MQTTEventBroker(BaseExternalEventBroker): """ An event broker that uses an MQTT (v3.1 or v5) broker to broadcast events. @@ -80,6 +81,9 @@ def __attrs_post_init__(self) -> None: elif self.ssl: self._client.tls_set() + def __repr__(self) -> str: + return create_repr(self, "host", "port", "transport") + async def start(self, exit_stack: AsyncExitStack, logger: Logger) -> None: await super().start(exit_stack, logger) self._portal = await exit_stack.enter_async_context(BlockingPortal()) diff --git a/src/apscheduler/eventbrokers/psycopg.py b/src/apscheduler/eventbrokers/psycopg.py index 921bbfd99..615ff0b8a 100644 --- a/src/apscheduler/eventbrokers/psycopg.py +++ b/src/apscheduler/eventbrokers/psycopg.py @@ -18,6 +18,7 @@ from .._events import Event from .._exceptions import SerializationError +from .._utils import create_repr from .._validators import positive_number from .base import BaseExternalEventBroker @@ -29,7 +30,7 @@ def convert_options(value: Mapping[str, Any]) -> dict[str, Any]: return dict(value, autocommit=True) -@attrs.define(eq=False) +@attrs.define(eq=False, repr=False) class PsycopgEventBroker(BaseExternalEventBroker): """ An asynchronous, psycopg_ based event broker that uses a PostgreSQL server to @@ -91,6 +92,9 @@ def from_async_sqla_engine( ) return cls(conninfo, options or {}, **kwargs) + def __repr__(self) -> str: + return create_repr(self, "conninfo") + @property def _temporary_failure_exceptions(self) -> tuple[type[Exception], ...]: return OSError, InterfaceError diff --git a/src/apscheduler/eventbrokers/redis.py b/src/apscheduler/eventbrokers/redis.py index 8bcdb6c44..45ecf80af 100644 --- a/src/apscheduler/eventbrokers/redis.py +++ b/src/apscheduler/eventbrokers/redis.py @@ -15,10 +15,11 @@ from redis.asyncio.connection import ConnectionPool from .._events import Event +from .._utils import create_repr from .base import BaseExternalEventBroker -@attrs.define(eq=False) +@attrs.define(eq=False, repr=False) class RedisEventBroker(BaseExternalEventBroker): """ An event broker that uses a Redis server to broadcast events. 
@@ -55,6 +56,9 @@ def __attrs_post_init__(self) -> None:
         else:
             self._client = self.client_or_url
 
+    def __repr__(self) -> str:
+        return create_repr(self, "client_or_url")
+
     def _retry(self) -> tenacity.AsyncRetrying:
         def after_attempt(retry_state: tenacity.RetryCallState) -> None:
             self._logger.warning(
diff --git a/tests/test_datastores.py b/tests/test_datastores.py
index 2a5ffaceb..32d755592 100644
--- a/tests/test_datastores.py
+++ b/tests/test_datastores.py
@@ -4,6 +4,7 @@
 from contextlib import AsyncExitStack, asynccontextmanager
 from datetime import datetime, timedelta, timezone
 from logging import Logger
+from pathlib import Path
 from typing import TYPE_CHECKING, AsyncGenerator
 from unittest.mock import Mock
 
@@ -32,6 +33,9 @@
 from apscheduler._structures import ScheduleResult
 from apscheduler.abc import DataStore, EventBroker, Serializer
 from apscheduler.datastores.base import BaseExternalDataStore
+from apscheduler.datastores.memory import MemoryDataStore
+from apscheduler.datastores.mongodb import MongoDBDataStore
+from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
 from apscheduler.triggers.date import DateTrigger
 
 if TYPE_CHECKING:
@@ -819,3 +823,24 @@ async def test_acquire_jobs_deserialization_failure(
 
     # This should not yield any jobs
     assert await datastore.acquire_jobs("scheduler_id", timedelta(seconds=30), 1) == []
+
+
+class TestRepr:
+    async def test_memory(self, memory_store: MemoryDataStore) -> None:
+        assert repr(memory_store) == "MemoryDataStore()"
+
+    async def test_aiosqlite(
+        self, aiosqlite_store: SQLAlchemyDataStore, tmp_path: Path
+    ) -> None:
+        assert repr(aiosqlite_store) == (
+            f"SQLAlchemyDataStore(url='sqlite+aiosqlite:///{tmp_path}/test.db')"
+        )
+
+    async def test_psycopg(self, psycopg_async_store: SQLAlchemyDataStore) -> None:
+        assert repr(psycopg_async_store) == (
+            "SQLAlchemyDataStore(url='postgresql+psycopg://postgres:***@localhost/"
+            "testdb', schema='psycopg_async')"
+        )
+
+    async def test_mongodb(self, mongodb_store: MongoDBDataStore) -> None:
+        assert repr(mongodb_store) == "MongoDBDataStore(host=[('localhost', 27017)])"
diff --git a/tests/test_schedulers.py b/tests/test_schedulers.py
index 36f730755..f0d57d080 100644
--- a/tests/test_schedulers.py
+++ b/tests/test_schedulers.py
@@ -133,6 +133,13 @@ def __call__(self) -> int:
 
 
 class TestAsyncScheduler:
+    def test_repr(self) -> None:
+        scheduler = AsyncScheduler(identity="my identity")
+        assert repr(scheduler) == (
+            "AsyncScheduler(identity='my identity', role=<SchedulerRole.both: 3>, "
+            "data_store=MemoryDataStore(), event_broker=LocalEventBroker())"
+        )
+
     async def test_use_before_initialized(self) -> None:
         scheduler = AsyncScheduler()
         with pytest.raises(
@@ -1057,6 +1064,13 @@ def test_interface_parity(self) -> None:
                 args == sync_args[kind]
             ), f"Parameter mismatch for {attrname}(): {args} != {sync_args[kind]}"
 
+    def test_repr(self) -> None:
+        scheduler = Scheduler(identity="my identity")
+        assert repr(scheduler) == (
+            "Scheduler(identity='my identity', role=<SchedulerRole.both: 3>, "
+            "data_store=MemoryDataStore(), event_broker=LocalEventBroker())"
+        )
+
     def test_configure(self) -> None:
         executor = ThreadPoolJobExecutor()
         task_defaults = TaskDefaults(job_executor="executor1")
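
A minimal usage sketch (not part of the diff above) of what the new create_repr()-based output looks like with scheduler defaults. It assumes the default MemoryDataStore/LocalEventBroker pairing and that the default role renders as SchedulerRole.both; the printed values mirror the expectations in the tests added by this patch and are illustrative only.

# Illustrative only -- exercises the __repr__ methods added by this patch,
# assuming the default data store and event broker are used.
from apscheduler import AsyncScheduler
from apscheduler.datastores.memory import MemoryDataStore

scheduler = AsyncScheduler(identity="my identity")
print(repr(scheduler))
# AsyncScheduler(identity='my identity', role=<SchedulerRole.both: 3>,
#     data_store=MemoryDataStore(), event_broker=LocalEventBroker())
# (a single line in practice; wrapped here for readability)

print(repr(MemoryDataStore()))  # MemoryDataStore()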