From 0641127bb3341ed11ac1e6d71cf6a6a2d21806a7 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 14 Oct 2023 20:55:35 +0100 Subject: [PATCH 001/107] Improved aggregate examples 7 and 8 by introducing a custom type of value object (the Trick class) and using this in aggregate events and aggregate state, and having this type reconstructed from JSON by Pydantic when reconstructing aggregate state from both recorded aggregate events and snapshots which simply contain string values representing the name of the trick. --- Makefile | 2 +- docs/topics/examples/aggregate7.rst | 6 +++- docs/topics/examples/aggregate8.rst | 6 +++- eventsourcing/application.py | 5 +++- eventsourcing/domain.py | 2 +- .../examples/aggregate7/application.py | 3 +- .../examples/aggregate7/domainmodel.py | 10 +++++-- .../examples/aggregate7/test_application.py | 23 ++++++++++----- .../test_compression_and_encryption.py | 17 ++++++++--- .../aggregate7/test_snapshotting_intervals.py | 17 ++++++++--- .../examples/aggregate8/application.py | 12 +++++--- .../examples/aggregate8/domainmodel.py | 29 +++++++++++++++---- .../examples/aggregate8/test_application.py | 22 ++++++++++---- .../test_compression_and_encryption.py | 11 +++++-- .../aggregate8/test_snapshotting_intervals.py | 4 +-- setup.py | 2 +- 16 files changed, 125 insertions(+), 46 deletions(-) diff --git a/Makefile b/Makefile index 2dbe90da..1bf54cd2 100644 --- a/Makefile +++ b/Makefile @@ -148,7 +148,7 @@ create_postgres_db: .PHONY: updatetools updatetools: pip install -U pip - pip install -U black mypy flake8 flake8-bugbear isort + pip install -U black mypy flake8 flake8-bugbear isort orjson python-coveralls coverage .PHONY: docs docs: diff --git a/docs/topics/examples/aggregate7.rst b/docs/topics/examples/aggregate7.rst index 7c926d90..49db116e 100644 --- a/docs/topics/examples/aggregate7.rst +++ b/docs/topics/examples/aggregate7.rst @@ -21,7 +21,11 @@ that have been deserialised by orjson. 
One advantage of using Pydantic here is that any custom value objects will be automatically reconstructed without needing to define the transcoding classes that would be needed when using the library's -default ``JSONTranscoder``. +default ``JSONTranscoder``. This is demonstrated in the example below +with the ``Trick`` class, which is used in both aggregate events and +aggregate state, and which is reconstructed from serialised string +values, representing only the name of the trick, from both recorded +aggregate events and from recorded snapshots. Domain model diff --git a/docs/topics/examples/aggregate8.rst b/docs/topics/examples/aggregate8.rst index a4a1bcaf..932dd04d 100644 --- a/docs/topics/examples/aggregate8.rst +++ b/docs/topics/examples/aggregate8.rst @@ -20,7 +20,11 @@ Pydantic model. One advantage of using Pydantic here is that any custom value objects will be automatically reconstructed without needing to define the transcoding classes that would be needed when using the library's -default ``JSONTranscoder``. +default ``JSONTranscoder``. This is demonstrated in the example below +with the ``Trick`` class, which is used in both aggregate events and +aggregate state, and which is reconstructed from serialised string +values, representing only the name of the trick, from both recorded +aggregate events and from recorded snapshots. 
Domain model diff --git a/eventsourcing/application.py b/eventsourcing/application.py index 35400987..51673d6e 100644 --- a/eventsourcing/application.py +++ b/eventsourcing/application.py @@ -884,7 +884,10 @@ def take_snapshot( aggregate = self.repository.get( aggregate_id, version=version, projector_func=projector_func ) - snapshot = type(self).snapshot_class.take(aggregate) + snapshot_class = getattr( + type(aggregate), "Snapshot", type(self).snapshot_class + ) + snapshot = snapshot_class.take(aggregate) self.snapshots.put([snapshot]) def notify(self, new_events: List[DomainEventProtocol]) -> None: diff --git a/eventsourcing/domain.py b/eventsourcing/domain.py index 68dbf7ed..beb31cfb 100644 --- a/eventsourcing/domain.py +++ b/eventsourcing/domain.py @@ -1556,7 +1556,7 @@ def take(cls: Any, aggregate: Any) -> Any: class CanSnapshotAggregate(HasOriginatorIDVersion, CanCreateTimestamp): topic: str - state: Dict[str, Any] + state: Any @classmethod def take( diff --git a/eventsourcing/examples/aggregate7/application.py b/eventsourcing/examples/aggregate7/application.py index 358a6cc7..e454ba52 100644 --- a/eventsourcing/examples/aggregate7/application.py +++ b/eventsourcing/examples/aggregate7/application.py @@ -4,6 +4,7 @@ from eventsourcing.application import Application from eventsourcing.examples.aggregate7.domainmodel import ( Snapshot, + Trick, add_trick, project_dog, register_dog, @@ -26,7 +27,7 @@ def register_dog(self, name: str) -> UUID: def add_trick(self, dog_id: UUID, trick: str) -> None: dog = self.repository.get(dog_id, projector_func=project_dog) - self.save(add_trick(dog, trick)) + self.save(add_trick(dog, Trick(name=trick))) def get_dog(self, dog_id: UUID) -> Dict[str, Any]: dog = self.repository.get(dog_id, projector_func=project_dog) diff --git a/eventsourcing/examples/aggregate7/domainmodel.py b/eventsourcing/examples/aggregate7/domainmodel.py index 8152b85e..838b775a 100644 --- a/eventsourcing/examples/aggregate7/domainmodel.py +++ 
b/eventsourcing/examples/aggregate7/domainmodel.py @@ -68,9 +68,13 @@ def project_aggregate( return project_aggregate +class Trick(BaseModel): + name: str + + class Dog(Aggregate): name: str - tricks: Tuple[str, ...] + tricks: Tuple[Trick, ...] class DogRegistered(DomainEvent): @@ -78,7 +82,7 @@ class DogRegistered(DomainEvent): class TrickAdded(DomainEvent): - trick: str + trick: Trick def register_dog(name: str) -> DomainEvent: @@ -90,7 +94,7 @@ def register_dog(name: str) -> DomainEvent: ) -def add_trick(dog: Dog, trick: str) -> DomainEvent: +def add_trick(dog: Dog, trick: Trick) -> DomainEvent: return TrickAdded( originator_id=dog.id, originator_version=dog.version + 1, diff --git a/eventsourcing/examples/aggregate7/test_application.py b/eventsourcing/examples/aggregate7/test_application.py index b1b58bb2..9aa502f2 100644 --- a/eventsourcing/examples/aggregate7/test_application.py +++ b/eventsourcing/examples/aggregate7/test_application.py @@ -1,7 +1,8 @@ +from typing import Tuple from unittest import TestCase from eventsourcing.examples.aggregate7.application import DogSchool -from eventsourcing.examples.aggregate7.domainmodel import project_dog +from eventsourcing.examples.aggregate7.domainmodel import Trick, project_dog class TestDogSchool(TestCase): @@ -16,8 +17,8 @@ def test_dog_school(self) -> None: # Query application state. dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Select notifications. notifications = school.notification_log.select(start=1, limit=10) @@ -26,11 +27,19 @@ def test_dog_school(self) -> None: # Take snapshot. 
school.take_snapshot(dog_id, version=3, projector_func=project_dog) dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Continue with snapshotted aggregate. school.add_trick(dog_id, "fetch ball") dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead", "fetch ball") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead", "fetch ball")) + + def assertEqualTricks( + self, actual: Tuple[Trick, ...], expected: Tuple[str, ...] + ) -> None: + self.assertEqual(len(actual), len(expected)) + for i, trick in enumerate(actual): + self.assertIsInstance(trick, Trick) + self.assertEqual(trick.name, expected[i]) diff --git a/eventsourcing/examples/aggregate7/test_compression_and_encryption.py b/eventsourcing/examples/aggregate7/test_compression_and_encryption.py index 8c2d58fb..1abffe17 100644 --- a/eventsourcing/examples/aggregate7/test_compression_and_encryption.py +++ b/eventsourcing/examples/aggregate7/test_compression_and_encryption.py @@ -1,8 +1,9 @@ +from typing import Tuple from unittest import TestCase from eventsourcing.cipher import AESCipher from eventsourcing.examples.aggregate7.application import DogSchool -from eventsourcing.examples.aggregate7.domainmodel import project_dog +from eventsourcing.examples.aggregate7.domainmodel import Trick, project_dog class TestDogSchool(TestCase): @@ -24,7 +25,7 @@ def test_dog_school(self) -> None: # Query application state. dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Select notifications. 
notifications = school.notification_log.select(start=1, limit=10) @@ -34,10 +35,18 @@ def test_dog_school(self) -> None: school.take_snapshot(dog_id, version=3, projector_func=project_dog) dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Continue with snapshotted aggregate. school.add_trick(dog_id, "fetch ball") dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead", "fetch ball") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead", "fetch ball")) + + def assertEqualTricks( + self, actual: Tuple[Trick, ...], expected: Tuple[str, ...] + ) -> None: + self.assertEqual(len(actual), len(expected)) + for i, trick in enumerate(actual): + self.assertIsInstance(trick, Trick) + self.assertEqual(trick.name, expected[i]) diff --git a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py index 6fd576d3..599b1717 100644 --- a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +++ b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import Tuple, cast from unittest import TestCase from uuid import UUID @@ -6,6 +6,7 @@ from eventsourcing.examples.aggregate7.application import DogSchool from eventsourcing.examples.aggregate7.domainmodel import ( Dog, + Trick, add_trick, project_dog, register_dog, @@ -23,7 +24,7 @@ def register_dog(self, name: str) -> UUID: def add_trick(self, dog_id: UUID, trick: str) -> None: dog = self.repository.get(dog_id, projector_func=project_dog) - event = add_trick(dog, trick) + event = add_trick(dog, Trick(name=trick)) dog = cast(Dog, project_dog(dog, [event])) self.save(dog, event) @@ -58,5 +59,13 @@ def test_dog_school(self) -> None: # Query application state. 
dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) + + def assertEqualTricks( + self, actual: Tuple[Trick, ...], expected: Tuple[str, ...] + ) -> None: + self.assertEqual(len(actual), len(expected)) + for i, trick in enumerate(actual): + self.assertIsInstance(trick, Trick) + self.assertEqual(trick.name, expected[i]) diff --git a/eventsourcing/examples/aggregate8/application.py b/eventsourcing/examples/aggregate8/application.py index b6d089b6..102c6277 100644 --- a/eventsourcing/examples/aggregate8/application.py +++ b/eventsourcing/examples/aggregate8/application.py @@ -2,7 +2,7 @@ from uuid import UUID from eventsourcing.application import Application -from eventsourcing.examples.aggregate8.domainmodel import Dog, Snapshot +from eventsourcing.examples.aggregate8.domainmodel import Dog, Trick from eventsourcing.examples.aggregate8.persistence import ( OrjsonTranscoder, PydanticMapper, @@ -12,7 +12,6 @@ class DogSchool(Application): is_snapshotting_enabled = True - snapshot_class = Snapshot def register_dog(self, name: str) -> UUID: dog = Dog(name) @@ -21,12 +20,17 @@ def register_dog(self, name: str) -> UUID: def add_trick(self, dog_id: UUID, trick: str) -> None: dog: Dog = self.repository.get(dog_id) - dog.add_trick(trick) + dog.add_trick(Trick(name=trick)) self.save(dog) def get_dog(self, dog_id: UUID) -> Dict[str, Any]: dog: Dog = self.repository.get(dog_id) - return {"name": dog.name, "tricks": tuple(dog.tricks)} + return { + "name": dog.name, + "tricks": tuple(dog.tricks), + "created_on": dog.created_on, + "modified_on": dog.modified_on, + } def construct_mapper(self) -> Mapper: return self.factory.mapper( diff --git a/eventsourcing/examples/aggregate8/domainmodel.py b/eventsourcing/examples/aggregate8/domainmodel.py index a5786c36..fcd4e3b2 100644 --- 
a/eventsourcing/examples/aggregate8/domainmodel.py +++ b/eventsourcing/examples/aggregate8/domainmodel.py @@ -1,8 +1,8 @@ from datetime import datetime -from typing import Any, Dict, List +from typing import List from uuid import UUID -from pydantic import BaseModel +from pydantic import BaseModel, Extra from eventsourcing.domain import ( Aggregate as BaseAggregate, @@ -30,17 +30,34 @@ class Created(Event, CanInitAggregate): originator_topic: str -class Snapshot(DomainEvent, CanSnapshotAggregate): +class SnapshotState(BaseModel): + class Config: + extra = Extra.allow + + +class AggregateSnapshot(DomainEvent, CanSnapshotAggregate): topic: str - state: Dict[str, Any] + state: SnapshotState + + +class Trick(BaseModel): + name: str + + +class DogState(SnapshotState): + name: str + tricks: List[Trick] class Dog(Aggregate): + class Snapshot(AggregateSnapshot): + state: DogState + @event("Registered") def __init__(self, name: str) -> None: self.name = name - self.tricks: List[str] = [] + self.tricks: List[Trick] = [] @event("TrickAdded") - def add_trick(self, trick: str) -> None: + def add_trick(self, trick: Trick) -> None: self.tricks.append(trick) diff --git a/eventsourcing/examples/aggregate8/test_application.py b/eventsourcing/examples/aggregate8/test_application.py index 6a23b92e..0ef76f9e 100644 --- a/eventsourcing/examples/aggregate8/test_application.py +++ b/eventsourcing/examples/aggregate8/test_application.py @@ -1,6 +1,8 @@ +from typing import Tuple from unittest import TestCase from eventsourcing.examples.aggregate8.application import DogSchool +from eventsourcing.examples.aggregate8.domainmodel import Trick class TestDogSchool(TestCase): @@ -15,8 +17,8 @@ def test_dog_school(self) -> None: # Query application state. dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Select notifications. 
notifications = school.notification_log.select(start=1, limit=10) @@ -25,11 +27,19 @@ def test_dog_school(self) -> None: # Take snapshot. school.take_snapshot(dog_id, version=3) dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead")) # Continue with snapshotted aggregate. school.add_trick(dog_id, "fetch ball") dog = school.get_dog(dog_id) - assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead", "fetch ball") + self.assertEqual(dog["name"], "Fido") + self.assertEqualTricks(dog["tricks"], ("roll over", "play dead", "fetch ball")) + + def assertEqualTricks( + self, actual: Tuple[Trick, ...], expected: Tuple[str, ...] + ) -> None: + self.assertEqual(len(actual), len(expected)) + for i, trick in enumerate(actual): + self.assertIsInstance(trick, Trick) + self.assertEqual(trick.name, expected[i]) diff --git a/eventsourcing/examples/aggregate8/test_compression_and_encryption.py b/eventsourcing/examples/aggregate8/test_compression_and_encryption.py index 0cd74fcb..454f0407 100644 --- a/eventsourcing/examples/aggregate8/test_compression_and_encryption.py +++ b/eventsourcing/examples/aggregate8/test_compression_and_encryption.py @@ -2,6 +2,7 @@ from eventsourcing.cipher import AESCipher from eventsourcing.examples.aggregate8.application import DogSchool +from eventsourcing.examples.aggregate8.domainmodel import Trick class TestDogSchool(TestCase): @@ -23,7 +24,7 @@ def test_dog_school(self) -> None: # Query application state. dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + assert dog["tricks"] == (Trick(name="roll over"), Trick(name="play dead")) # Select notifications. 
notifications = school.notification_log.select(start=1, limit=10) @@ -33,10 +34,14 @@ def test_dog_school(self) -> None: school.take_snapshot(dog_id, version=3) dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + assert dog["tricks"] == (Trick(name="roll over"), Trick(name="play dead")) # Continue with snapshotted aggregate. school.add_trick(dog_id, "fetch ball") dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead", "fetch ball") + assert dog["tricks"] == ( + Trick(name="roll over"), + Trick(name="play dead"), + Trick(name="fetch ball"), + ) diff --git a/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py b/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py index c1cf854b..078bbd46 100644 --- a/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py +++ b/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py @@ -1,7 +1,7 @@ from unittest import TestCase from eventsourcing.examples.aggregate8.application import DogSchool -from eventsourcing.examples.aggregate8.domainmodel import Dog +from eventsourcing.examples.aggregate8.domainmodel import Dog, Trick class SubDogSchool(DogSchool): @@ -27,4 +27,4 @@ def test_dog_school(self) -> None: # Query application state. dog = school.get_dog(dog_id) assert dog["name"] == "Fido" - assert dog["tricks"] == ("roll over", "play dead") + assert dog["tricks"] == (Trick(name="roll over"), Trick(name="play dead")) diff --git a/setup.py b/setup.py index 302c6950..f1c1e4cb 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ + crypto_requires + [ "Sphinx==4.2.0", - "sphinx_rtd_theme==1.0.0", + "sphinx_rtd_theme==1.3.0", ] ) From e34dd84585683394036bc42a0251f10c5c7504a9 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sun, 15 Oct 2023 15:07:18 +0100 Subject: [PATCH 002/107] Improved the Synopsis in introduction.rst. 
--- docs/topics/introduction.rst | 56 +++++++++++++++++++++++++----------- eventsourcing/application.py | 22 ++++++++++++-- 2 files changed, 59 insertions(+), 19 deletions(-) diff --git a/docs/topics/introduction.rst b/docs/topics/introduction.rst index de05d54d..91edfd8d 100644 --- a/docs/topics/introduction.rst +++ b/docs/topics/introduction.rst @@ -34,8 +34,8 @@ The "live coding" video below shows how to do event sourcing with Python in less Synopsis ======== -Use the library's :class:`~eventsourcing.domain.Aggregate` class and the :func:`@event` decorator to define -event-sourced aggregates. +Use the library's :class:`~eventsourcing.domain.Aggregate` class and the +:func:`@event` decorator to define event-sourced aggregates. .. code-block:: python @@ -52,12 +52,37 @@ event-sourced aggregates. def add_trick(self, trick): self.tricks.append(trick) -Aggregate events will be triggered when decorated -methods are called, and the decorated method bodies will be used to mutate -the state of the aggregate. +The :func:`@event` decorator can be used on "public" or +"private" methods. + +Call the aggregate class to create a new aggregate. Call the decorated methods to evolve +aggregate state. + +.. code-block:: python + + dog = Dog('Fido') + dog.add_trick('roll over') + +New aggregate events will be triggered when decorated methods are called. The decorated method bodies are used to +mutate the state of the aggregate, immediately after the decorated methods are called, and later when reconstructing +aggregates from stored events. New aggregate events can be collected from aggregates using the +:func:`~eventsourcing.domain.Aggregate.collect_events` method. + +.. code-block:: python + + new_events = dog.collect_events() + assert len(new_events) == 2 + Use the library's :class:`~eventsourcing.application.Application` class to define event-sourced applications. -Add command and query methods that use event-sourced aggregates. 
+Application objects combine the aggregates of a domain model with persistence infrastructure that stores aggregate +events. + +Add application command methods that create and evolve aggregate state. Add application query methods that present +current state. The application's :func:`~eventsourcing.application.Application.save` method collects new events +from aggregates and records them in an event store. The :func:`~eventsourcing.application.Repository.get` +method of the application's :attr:`~eventsourcing.application.Application.repository` reconstructs aggregates +from previously recorded events. .. code-block:: python @@ -70,20 +95,15 @@ Add command and query methods that use event-sourced aggregates. self.save(dog) return dog.id - def add_trick(self, dog_id, trick): - dog = self.repository.get(dog_id) - dog.add_trick(trick) - self.save(dog) - def get_dog(self, dog_id): dog = self.repository.get(dog_id) return {'name': dog.name, 'tricks': tuple(dog.tricks)} + def add_trick(self, dog_id, trick): + dog = self.repository.get(dog_id) + dog.add_trick(trick) + self.save(dog) -An application combines domain model aggregates persistence infrastructure. -Aggregate events are collected and stored by the appliation :func:`~eventsourcing.application.Application.save` -method. Aggregate events are retrieved and used to reconstruct aggregates -by the repository :func:`~eventsourcing.application.Repository.get` method. Construct an application object by calling the application class. @@ -114,7 +134,8 @@ Access the state of the application by calling the application's query methods. assert dog_details['name'] == 'Fido' assert dog_details['tricks'] == ('roll over', 'fetch ball') -Select event notifications from the application's notification log. +Propagate the state of an application with the :func:`~eventsourcing.application.NotificationLog.select` method of the +:attr:`~eventsourcing.application.Application.notification_log`. .. 
code-block:: python @@ -126,6 +147,9 @@ Select event notifications from the application's notification log. assert notifications[1].id == 2 assert notifications[2].id == 3 +An application's notification log presents all the aggregate events of an application in the order they were recorded +as a sequence of event notifications. In this way, the state of the application can be propagated and processed in a +reliable way. Please read the :doc:`Tutorial ` for more information. diff --git a/eventsourcing/application.py b/eventsourcing/application.py index 51673d6e..0163db22 100644 --- a/eventsourcing/application.py +++ b/eventsourcing/application.py @@ -672,13 +672,29 @@ def __init__(self, env: Optional[EnvType] = None) -> None: self.snapshots: Optional[EventStore] = None if self.factory.is_snapshotting_enabled(): self.snapshots = self.construct_snapshot_store() - self.repository = self.construct_repository() - self.notification_log = self.construct_notification_log() + self._repository = self.construct_repository() + self._notification_log = self.construct_notification_log() self.closing = Event() self.previous_max_notification_id: Optional[ int ] = self.recorder.max_notification_id() + @property + def repository(self) -> Repository: + """ + An application's repository reconstructs aggregates from stored events. + """ + return self._repository + + @property + def notification_log(self) -> LocalNotificationLog: + """ + An application's notification log presents all the aggregate events + of an application in the order they were recorded as a sequence of event + notifications. 
+ """ + return self._notification_log + @property def log(self) -> LocalNotificationLog: warn( @@ -686,7 +702,7 @@ def log(self) -> LocalNotificationLog: DeprecationWarning, stacklevel=2, ) - return self.notification_log + return self._notification_log def construct_env(self, name: str, env: Optional[EnvType] = None) -> Environment: """ From befbc465ee0a09c28493062391c2ca69bedb807f Mon Sep 17 00:00:00 2001 From: johnbywater Date: Mon, 16 Oct 2023 18:49:16 +0100 Subject: [PATCH 003/107] Changes for Pydantic v2 (model_dump() and Config.frozen). --- eventsourcing/examples/aggregate7/domainmodel.py | 9 +++------ eventsourcing/examples/aggregate7/persistence.py | 2 +- eventsourcing/examples/aggregate8/domainmodel.py | 2 +- eventsourcing/examples/aggregate8/persistence.py | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/eventsourcing/examples/aggregate7/domainmodel.py b/eventsourcing/examples/aggregate7/domainmodel.py index 838b775a..edf3eb33 100644 --- a/eventsourcing/examples/aggregate7/domainmodel.py +++ b/eventsourcing/examples/aggregate7/domainmodel.py @@ -16,7 +16,7 @@ class DomainEvent(BaseModel): timestamp: datetime class Config: - allow_mutation = False + frozen = True def create_timestamp() -> datetime: @@ -30,16 +30,13 @@ class Aggregate(BaseModel): modified_on: datetime class Config: - allow_mutation = False + frozen = True class Snapshot(DomainEvent): topic: str state: Dict[str, Any] - class Config: - allow_mutation = False - @classmethod def take(cls, aggregate: Aggregate) -> Snapshot: return Snapshot( @@ -47,7 +44,7 @@ def take(cls, aggregate: Aggregate) -> Snapshot: originator_version=aggregate.version, timestamp=create_timestamp(), topic=get_topic(type(aggregate)), - state=aggregate.dict(), + state=aggregate.model_dump(), ) diff --git a/eventsourcing/examples/aggregate7/persistence.py b/eventsourcing/examples/aggregate7/persistence.py index 6907994d..e328730f 100644 --- a/eventsourcing/examples/aggregate7/persistence.py +++ 
b/eventsourcing/examples/aggregate7/persistence.py @@ -17,7 +17,7 @@ class PydanticMapper(Mapper): def to_stored_event(self, domain_event: DomainEventProtocol) -> StoredEvent: topic = get_topic(domain_event.__class__) - event_state = cast(BaseModel, domain_event).dict() + event_state = cast(BaseModel, domain_event).model_dump() stored_state = self.transcoder.encode(event_state) if self.compressor: stored_state = self.compressor.compress(stored_state) diff --git a/eventsourcing/examples/aggregate8/domainmodel.py b/eventsourcing/examples/aggregate8/domainmodel.py index fcd4e3b2..0f62e7d8 100644 --- a/eventsourcing/examples/aggregate8/domainmodel.py +++ b/eventsourcing/examples/aggregate8/domainmodel.py @@ -19,7 +19,7 @@ class DomainEvent(BaseModel): timestamp: datetime class Config: - allow_mutation = False + frozen = True class Aggregate(BaseAggregate): diff --git a/eventsourcing/examples/aggregate8/persistence.py b/eventsourcing/examples/aggregate8/persistence.py index 6907994d..e328730f 100644 --- a/eventsourcing/examples/aggregate8/persistence.py +++ b/eventsourcing/examples/aggregate8/persistence.py @@ -17,7 +17,7 @@ class PydanticMapper(Mapper): def to_stored_event(self, domain_event: DomainEventProtocol) -> StoredEvent: topic = get_topic(domain_event.__class__) - event_state = cast(BaseModel, domain_event).dict() + event_state = cast(BaseModel, domain_event).model_dump() stored_state = self.transcoder.encode(event_state) if self.compressor: stored_state = self.compressor.compress(stored_state) From 7d61ae237d2c98f25a973f45b881e243788bc113 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Mon, 16 Oct 2023 18:52:07 +0100 Subject: [PATCH 004/107] Added example "content management system". 
--- Makefile | 2 +- docs/topics/examples.rst | 8 + .../examples/content-management-system.rst | 89 ++++++++ docs/topics/examples/searchable-content.rst | 47 ++-- .../examples/contentmanagement/application.py | 6 +- .../examples/contentmanagement/domainmodel.py | 22 +- .../examples/contentmanagement/test.py | 20 +- .../contentmanagementsystem/__init__.py | 0 .../contentmanagementsystem/application.py | 49 +++++ .../contentmanagementsystem/postgres.py | 17 ++ .../contentmanagementsystem/sqlite.py | 17 ++ .../contentmanagementsystem/system.py | 12 ++ .../contentmanagementsystem/test_system.py | 172 +++++++++++++++ .../examples/searchablecontent/application.py | 19 +- .../examples/searchablecontent/persistence.py | 17 +- .../examples/searchablecontent/postgres.py | 200 ++++++++++-------- .../examples/searchablecontent/sqlite.py | 148 +++++++------ ...archablecontent.py => test_application.py} | 10 +- .../searchablecontent/test_recorder.py | 65 ++++++ eventsourcing/postgres.py | 10 +- eventsourcing/sqlite.py | 10 +- 21 files changed, 716 insertions(+), 224 deletions(-) create mode 100644 docs/topics/examples/content-management-system.rst create mode 100644 eventsourcing/examples/contentmanagementsystem/__init__.py create mode 100644 eventsourcing/examples/contentmanagementsystem/application.py create mode 100644 eventsourcing/examples/contentmanagementsystem/postgres.py create mode 100644 eventsourcing/examples/contentmanagementsystem/sqlite.py create mode 100644 eventsourcing/examples/contentmanagementsystem/system.py create mode 100644 eventsourcing/examples/contentmanagementsystem/test_system.py rename eventsourcing/examples/searchablecontent/{test_searchablecontent.py => test_application.py} (92%) create mode 100644 eventsourcing/examples/searchablecontent/test_recorder.py diff --git a/Makefile b/Makefile index 1bf54cd2..20c289ca 100644 --- a/Makefile +++ b/Makefile @@ -148,7 +148,7 @@ create_postgres_db: .PHONY: updatetools updatetools: pip install -U pip - pip 
install -U black mypy flake8 flake8-bugbear isort orjson python-coveralls coverage + pip install -U black mypy flake8 flake8-bugbear isort python-coveralls coverage orjson pydantic .PHONY: docs docs: diff --git a/docs/topics/examples.rst b/docs/topics/examples.rst index bf4dbca5..188b28d9 100644 --- a/docs/topics/examples.rst +++ b/docs/topics/examples.rst @@ -42,3 +42,11 @@ Example applications examples/content-management examples/searchable-timestamps examples/searchable-content + +Example systems +=============== + +.. toctree:: + :maxdepth: 2 + + examples/content-management-system diff --git a/docs/topics/examples/content-management-system.rst b/docs/topics/examples/content-management-system.rst new file mode 100644 index 00000000..e9797e0d --- /dev/null +++ b/docs/topics/examples/content-management-system.rst @@ -0,0 +1,89 @@ +System 1 - Content management system +==================================== + +In this example, event notifications from the ``ContentManagementApplication`` from +:doc:`/topics/examples/content-management` are processed and projected into an +eventually-consistent full text search index, a searchable "materialized view" of +the pages' body text just like :doc:`/topics/examples/searchable-content`. + +This is an example of CQRS. By separating the search engine "read model" from the content management +"write model", the commands that update pages will perform faster. But, more importantly, the search +engine can be redesigned and rebuilt by reprocessing those events. The projected searchable content +can be deleted and rebuilt, perhaps also to include page titles, or timestamps, or other information +contained in the domain events such as the authors, because it is updated by processing events. +This is the main advantage of "CQRS" over the "inline" technique used in :doc:`/topics/examples/searchable-content` +where the search index is simply updated whenever new events are recorded. 
Please note, it is possible
+to migrate from the "inline" technique to CQRS, by adding the downstream processing and then removing
+the inline updating, since the domain model is already event sourced. Similarly, other projections
+can be added to work alongside and concurrently with the updating of the search engine.
+
+Application
+-----------
+
+The ``SearchIndexApplication`` defined below is a :class:`~eventsourcing.system.ProcessApplication`.
+Its ``policy()`` function is coded to process the ``Page.Created`` and ``Page.BodyUpdated`` domain
+events of the ``ContentManagementApplication``. It also has a ``search()`` method that returns
+a list of page IDs.
+
+It works in a similar way to the ``SearchableContentApplication`` class in
+:doc:`/topics/examples/searchable-content`, by setting variable keyword arguments
+``insert_pages`` and ``update_pages``. However, rather than populating variable
+keyword arguments in the ``save()`` method, it populates ``insert_pages`` and ``update_pages``
+within its ``policy()`` function. The ``insert_pages`` and ``update_pages`` arguments are set
+on the :class:`~eventsourcing.application.ProcessingEvent` object passed into the ``policy()``
+function, which carries an event notification ID that indicates the position
+in the application sequence of the domain event that is being processed.
+
+The application will be configured to run with a custom :class:`~eventsourcing.persistence.ProcessRecorder`
+so that search index records will be updated atomically with the inserting of a tracking record which
+indicates which upstream event notification has been processed.
+
+Because the ``Page.BodyUpdated`` event carries only the ``diff`` of the page body, the
+``policy()`` function must first select the current page body from its own records
+and then apply the diff as a patch. The "exactly once" semantics provided by the library's
+system module guarantees that the diffs will always be applied in the correct order.
Without +this guarantee, the projection could become inconsistent, with the consequence that the diffs +will fail to be applied. + +.. literalinclude:: ../../../eventsourcing/examples/contentmanagementsystem/application.py + +System +------ + +A :class:`~eventsourcing.system.System` of applications is defined, in which the +``SearchIndexApplication`` follows the ``ContentManagementApplication``. This system +can then be used in any :class:`~eventsourcing.system.Runner`. + +.. literalinclude:: ../../../eventsourcing/examples/contentmanagementsystem/system.py + +PostgreSQL +---------- + +The ``PostgresSearchableContentRecorder`` from :doc:`/topics/examples/searchable-content` +is used to define a custom :class:`~eventsourcing.persistence.ProcessRecorder` for PostgreSQL. +The PostgreSQL :class:`~eventsourcing.postgres.Factory` class is extended to involve this custom recorder +in a custom persistence module so that it can be used by the ``SearchIndexApplication``. + + +.. literalinclude:: ../../../eventsourcing/examples/contentmanagementsystem/postgres.py + +SQLite +------ + +The ``SQLiteSearchableContentRecorder`` from :doc:`/topics/examples/searchable-content` +is used to define a custom :class:`~eventsourcing.persistence.ProcessRecorder` for SQLite. +The SQLite :class:`~eventsourcing.sqlite.Factory` class is extended to involve this custom recorder +in a custom persistence module so that it can be used by the ``SearchIndexApplication``. + +.. literalinclude:: ../../../eventsourcing/examples/contentmanagementsystem/sqlite.py + + +Test case +--------- + +The test case ``ContentManagementSystemTestCase`` creates three pages, for 'animals', 'plants' +and 'minerals'. Content is added to the pages. The content is searched with various queries and +the search results are checked. The test is executed twice, once with the application configured +for PostgreSQL, and once for SQLite. + +..
literalinclude:: ../../../eventsourcing/examples/contentmanagementsystem/test_system.py diff --git a/docs/topics/examples/searchable-content.rst b/docs/topics/examples/searchable-content.rst index de8fc0c5..e6f301df 100644 --- a/docs/topics/examples/searchable-content.rst +++ b/docs/topics/examples/searchable-content.rst @@ -11,13 +11,12 @@ to support full text search queries in an event-sourced application with both Application ----------- -The application class ``SearchableContentApplication`` extends the ``WikiApplication`` -class presented in the :doc:`content management example `. -It extends the :func:`~eventsourcing.application.Application.save` method by using the variable keyword parameters (``**kwargs``) -of the application :func:`~eventsourcing.application.Application.save` method to pass down to the recorder extra -information that will be used to update a searchable index of the event-sourced -content. It also introduces a ``search()`` method that expects a ``query`` -argument and returns a list of pages. +The application class ``SearchableContentApplication`` extends the ``ContentManagementApplication`` +class presented in :doc:`/topics/examples/content-management`. +Its :func:`~eventsourcing.application.Application.save` method sets the variable keyword +parameters ``insert_pages`` and ``update_pages``. It also introduces a ``search()`` method that +expects a ``query`` argument and returns a list of pages. The application's recorders are expected +to be receptive to these variable keyword parameters and to support the ``search_pages()`` function. .. literalinclude:: ../../../eventsourcing/examples/searchablecontent/application.py @@ -25,36 +24,33 @@ argument and returns a list of pages. Persistence ----------- -The recorder classes ``SearchableContentApplicationRecorder`` extend the PostgreSQL -and SQLite ``ApplicationRecorder`` classes by creating a table that contains the current -page body text. 
They define SQL statements that insert, update, and search the rows -of the table using search query syntax similar to the one used by web search engines. -They define a ``search_page_bodies()`` method which returns the page slugs for page -bodies that match the given search query. +The recorder class ``SearchableContentRecorder`` extends the ``AggregateRecorder`` by +defining abstract methods to search and select pages. These methods will be implemented +for both PostgreSQL and SQLite, which will also create custom tables for page content with +full text search indexes. .. literalinclude:: ../../../eventsourcing/examples/searchablecontent/persistence.py -The application recorder classes extend the ``_insert_events()`` method by inserting -and updating rows, according to the information passed down from the application -through the :func:`~eventsourcing.application.Application.save` method's variable keyword parameters. - -The infrastructure factory classes ``SearchableContentInfrastructureFactory`` extend the -PostgreSQL and SQLite ``Factory`` class by overriding the ``application_recorder()`` method -so that a ``SearchableContentApplicationRecorder`` is constructed as the application recorder. +The ``_insert_events()`` methods of the PostgreSQL and SQLite recorders are extended, so that +rows are inserted and updated, according to the information passed down from the application +in the variable keyword arguments ``insert_pages`` and ``update_pages``. PostgreSQL ---------- The PostgreSQL recorder uses a GIN index and the ``websearch_to_tsquery()`` function. +The PostgreSQL :class:`~eventsourcing.postgres.Factory` class is extended to involve this custom recorder +in a custom PostgreSQL persistence module so that it can be used by the ``ContentManagementApplication``. .. literalinclude:: ../../../eventsourcing/examples/searchablecontent/postgres.py - SQLite ------ The SQLite recorder uses a virtual table and the ``MATCH`` operator.
+The SQLite :class:`~eventsourcing.sqlite.Factory` class is extended to involve this custom recorder +in a custom SQLite persistence module so that it can be used by the ``ContentManagementApplication``. .. literalinclude:: ../../../eventsourcing/examples/searchablecontent/sqlite.py @@ -62,9 +58,10 @@ The SQLite recorder uses a virtual table and the ``MATCH`` operator. Test case --------- -The test case ``SearchableContentTestCase`` uses the application to create three -pages, for 'animals', 'plants' and 'minerals'. Content is added to the pages. The +The test case ``SearchableContentApplicationTestCase`` uses the ``SearchableContentApplication`` to +create three pages, for 'animals', 'plants' and 'minerals'. Content is added to the pages. The content is searched with various queries and the search results are checked. The -test is executed twice, with the application configured for both PostgreSQL and SQLite. +test case is executed twice, once with the PostgreSQL persistence module, and once with the +SQLite persistence module. -.. literalinclude:: ../../../eventsourcing/examples/searchablecontent/test_searchablecontent.py +.. 
literalinclude:: ../../../eventsourcing/examples/searchablecontent/test_application.py diff --git a/eventsourcing/examples/contentmanagement/application.py b/eventsourcing/examples/contentmanagement/application.py index df53b56a..2fb9ae98 100644 --- a/eventsourcing/examples/contentmanagement/application.py +++ b/eventsourcing/examples/contentmanagement/application.py @@ -24,10 +24,14 @@ def create_page(self, title: str, slug: str) -> None: index_entry = Index(slug, ref=page.id) self.save(page, page_logged, index_entry) - def get_page_details(self, slug: str) -> PageDetailsType: + def get_page_by_slug(self, slug: str) -> PageDetailsType: page = self._get_page_by_slug(slug) return self._details_from_page(page) + def get_page_by_id(self, page_id: UUID) -> PageDetailsType: + page = self._get_page_by_id(page_id) + return self._details_from_page(page) + def _details_from_page(self, page: Page) -> PageDetailsType: return { "title": page.title, diff --git a/eventsourcing/examples/contentmanagement/domainmodel.py b/eventsourcing/examples/contentmanagement/domainmodel.py index d0fea5df..c53c80f2 100644 --- a/eventsourcing/examples/contentmanagement/domainmodel.py +++ b/eventsourcing/examples/contentmanagement/domainmodel.py @@ -11,19 +11,24 @@ user_id_cvar: ContextVar[Optional[UUID]] = ContextVar("user_id", default=None) -@dataclass class Page(Aggregate): - title: str - slug: str - body: str = "" - modified_by: Optional[UUID] = field(default=None, init=False) - class Event(Aggregate.Event): user_id: Optional[UUID] = field(default_factory=user_id_cvar.get, init=False) def apply(self, aggregate: Aggregate) -> None: cast(Page, aggregate).modified_by = self.user_id + class Created(Event, Aggregate.Created): + title: str + slug: str + body: str + + def __init__(self, title: str, slug: str, body: str = ""): + self.title = title + self.slug = slug + self.body = body + self.modified_by: Optional[UUID] = None + @event("SlugUpdated") def update_slug(self, slug: str) -> None: self.slug 
= slug @@ -35,7 +40,10 @@ def update_title(self, title: str) -> None: def update_body(self, body: str) -> None: self._update_body(create_diff(old=self.body, new=body)) - @event("BodyUpdated") + class BodyUpdated(Event): + diff: str + + @event(BodyUpdated) def _update_body(self, diff: str) -> None: self.body = apply_patch(old=self.body, diff=diff) diff --git a/eventsourcing/examples/contentmanagement/test.py b/eventsourcing/examples/contentmanagement/test.py index 29646bc1..9836d6c9 100644 --- a/eventsourcing/examples/contentmanagement/test.py +++ b/eventsourcing/examples/contentmanagement/test.py @@ -26,7 +26,7 @@ def test(self) -> None: # Check the page doesn't exist. with self.assertRaises(PageNotFound): - app.get_page_details(slug="welcome") + app.get_page_by_slug(slug="welcome") # Check the list of pages is empty. pages = list(app.get_pages()) @@ -36,7 +36,7 @@ def test(self) -> None: app.create_page(title="Welcome", slug="welcome") # Present page identified by the given slug. - page = app.get_page_details(slug="welcome") + page = app.get_page_by_slug(slug="welcome") # Check we got a dict that has the given title and slug. self.assertEqual(page["title"], "Welcome") @@ -48,7 +48,7 @@ def test(self) -> None: app.update_title(slug="welcome", title="Welcome Visitors") # Check the title was updated. - page = app.get_page_details(slug="welcome") + page = app.get_page_by_slug(slug="welcome") self.assertEqual(page["title"], "Welcome Visitors") self.assertEqual(page["modified_by"], user_id) @@ -57,10 +57,10 @@ def test(self) -> None: # Check the index was updated. with self.assertRaises(PageNotFound): - app.get_page_details(slug="welcome") + app.get_page_by_slug(slug="welcome") # Check we can get the page by the new slug. 
- page = app.get_page_details(slug="welcome-visitors") + page = app.get_page_by_slug(slug="welcome-visitors") self.assertEqual(page["title"], "Welcome Visitors") self.assertEqual(page["slug"], "welcome-visitors") @@ -68,14 +68,14 @@ def test(self) -> None: app.update_body(slug="welcome-visitors", body="Welcome to my wiki") # Check the body was updated. - page = app.get_page_details(slug="welcome-visitors") + page = app.get_page_by_slug(slug="welcome-visitors") self.assertEqual(page["body"], "Welcome to my wiki") # Update the body. app.update_body(slug="welcome-visitors", body="Welcome to this wiki") # Check the body was updated. - page = app.get_page_details(slug="welcome-visitors") + page = app.get_page_by_slug(slug="welcome-visitors") self.assertEqual(page["body"], "Welcome to this wiki") # Update the body. @@ -89,7 +89,7 @@ def test(self) -> None: ) # Check the body was updated. - page = app.get_page_details(slug="welcome-visitors") + page = app.get_page_by_slug(slug="welcome-visitors") self.assertEqual( page["body"], """ @@ -120,7 +120,7 @@ def test(self) -> None: ) # Check 'modified_by' changed. - page = app.get_page_details(slug="welcome-visitors") + page = app.get_page_by_slug(slug="welcome-visitors") self.assertEqual(page["title"], "Welcome Visitors") self.assertEqual(page["modified_by"], user_id) @@ -173,6 +173,6 @@ def test(self) -> None: # that was previously being used. 
app.update_slug("welcome-visitors", "welcome") - page = app.get_page_details(slug="welcome") + page = app.get_page_by_slug(slug="welcome") self.assertEqual(page["title"], "Welcome Visitors") self.assertEqual(page["modified_by"], user_id) diff --git a/eventsourcing/examples/contentmanagementsystem/__init__.py b/eventsourcing/examples/contentmanagementsystem/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/eventsourcing/examples/contentmanagementsystem/application.py b/eventsourcing/examples/contentmanagementsystem/application.py new file mode 100644 index 00000000..7598079d --- /dev/null +++ b/eventsourcing/examples/contentmanagementsystem/application.py @@ -0,0 +1,49 @@ +from typing import List, cast +from uuid import UUID + +from eventsourcing.application import ProcessingEvent +from eventsourcing.domain import DomainEventProtocol +from eventsourcing.examples.contentmanagement.domainmodel import Page +from eventsourcing.examples.contentmanagement.utils import apply_patch +from eventsourcing.examples.searchablecontent.persistence import ( + SearchableContentRecorder, +) +from eventsourcing.system import ProcessApplication + + +class SearchIndexApplication(ProcessApplication): + env = { + "COMPRESSOR_TOPIC": "gzip", + } + + def policy( + self, + domain_event: DomainEventProtocol, + processing_event: ProcessingEvent, + ) -> None: + if isinstance(domain_event, Page.Created): + processing_event.saved_kwargs["insert_pages"] = [ + ( + domain_event.originator_id, + domain_event.slug, + domain_event.title, + domain_event.body, + ) + ] + elif isinstance(domain_event, Page.BodyUpdated): + recorder = cast(SearchableContentRecorder, self.recorder) + page_id = domain_event.originator_id + page_slug, page_title, page_body = recorder.select_page(page_id) + page_body = apply_patch(page_body, domain_event.diff) + processing_event.saved_kwargs["update_pages"] = [ + ( + page_id, + page_slug, + page_title, + page_body, + ) + ] + + def search(self, query: str) -> 
List[UUID]: + recorder = cast(SearchableContentRecorder, self.recorder) + return recorder.search_pages(query) diff --git a/eventsourcing/examples/contentmanagementsystem/postgres.py b/eventsourcing/examples/contentmanagementsystem/postgres.py new file mode 100644 index 00000000..cbe5dbb1 --- /dev/null +++ b/eventsourcing/examples/contentmanagementsystem/postgres.py @@ -0,0 +1,17 @@ +from eventsourcing.examples.searchablecontent.postgres import ( + PostgresSearchableContentRecorder, +) +from eventsourcing.postgres import Factory, PostgresProcessRecorder + + +class SearchableContentProcessRecorder( + PostgresSearchableContentRecorder, PostgresProcessRecorder +): + pass + + +class SearchableContentInfrastructureFactory(Factory): + process_recorder_class = SearchableContentProcessRecorder + + +del Factory diff --git a/eventsourcing/examples/contentmanagementsystem/sqlite.py b/eventsourcing/examples/contentmanagementsystem/sqlite.py new file mode 100644 index 00000000..5e3ec6fd --- /dev/null +++ b/eventsourcing/examples/contentmanagementsystem/sqlite.py @@ -0,0 +1,17 @@ +from eventsourcing.examples.searchablecontent.sqlite import ( + SQLiteSearchableContentRecorder, +) +from eventsourcing.sqlite import Factory, SQLiteProcessRecorder + + +class SearchableContentProcessRecorder( + SQLiteSearchableContentRecorder, SQLiteProcessRecorder +): + pass + + +class SearchableContentInfrastructureFactory(Factory): + process_recorder_class = SearchableContentProcessRecorder + + +del Factory diff --git a/eventsourcing/examples/contentmanagementsystem/system.py b/eventsourcing/examples/contentmanagementsystem/system.py new file mode 100644 index 00000000..b4b61ef9 --- /dev/null +++ b/eventsourcing/examples/contentmanagementsystem/system.py @@ -0,0 +1,12 @@ +from eventsourcing.examples.contentmanagement.application import ( + ContentManagementApplication, +) +from eventsourcing.examples.contentmanagementsystem.application import ( + SearchIndexApplication, +) +from eventsourcing.system 
import System + + +class ContentManagementSystem(System): + def __init__(self) -> None: + super().__init__(pipes=[[ContentManagementApplication, SearchIndexApplication]]) diff --git a/eventsourcing/examples/contentmanagementsystem/test_system.py b/eventsourcing/examples/contentmanagementsystem/test_system.py new file mode 100644 index 00000000..404e1416 --- /dev/null +++ b/eventsourcing/examples/contentmanagementsystem/test_system.py @@ -0,0 +1,172 @@ +from typing import Dict +from unittest import TestCase +from uuid import uuid4 + +from eventsourcing.examples.contentmanagement.application import ( + ContentManagementApplication, +) +from eventsourcing.examples.contentmanagement.domainmodel import user_id_cvar +from eventsourcing.examples.contentmanagementsystem.application import ( + SearchIndexApplication, +) +from eventsourcing.examples.contentmanagementsystem.system import ( + ContentManagementSystem, +) +from eventsourcing.postgres import PostgresDatastore +from eventsourcing.system import SingleThreadedRunner +from eventsourcing.tests.postgres_utils import drop_postgres_table + + +class ContentManagementSystemTestCase(TestCase): + env: Dict[str, str] = {} + + def test_system(self) -> None: + runner = SingleThreadedRunner(system=ContentManagementSystem(), env=self.env) + runner.start() + + content_management_app = runner.get(ContentManagementApplication) + search_index_app = runner.get(SearchIndexApplication) + + # Set user_id context variable. + user_id = uuid4() + user_id_cvar.set(user_id) + + # Create empty pages. + content_management_app.create_page(title="Animals", slug="animals") + content_management_app.create_page(title="Plants", slug="plants") + content_management_app.create_page(title="Minerals", slug="minerals") + + # Search, expect no results. + self.assertEqual(0, len(search_index_app.search("cat"))) + self.assertEqual(0, len(search_index_app.search("rose"))) + self.assertEqual(0, len(search_index_app.search("calcium"))) + + # Update the pages. 
+ content_management_app.update_body(slug="animals", body="cat") + content_management_app.update_body(slug="plants", body="rose") + content_management_app.update_body(slug="minerals", body="calcium") + + # Search for single words. + page_ids = search_index_app.search("cat") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "animals") + self.assertEqual(page["body"], "cat") + + page_ids = search_index_app.search("rose") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "plants") + self.assertEqual(page["body"], "rose") + + page_ids = search_index_app.search("calcium") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "minerals") + self.assertEqual(page["body"], "calcium") + + self.assertEqual(len(search_index_app.search("dog")), 0) + self.assertEqual(len(search_index_app.search("bluebell")), 0) + self.assertEqual(len(search_index_app.search("zinc")), 0) + + # Update the pages again. + content_management_app.update_body(slug="animals", body="cat dog zebra") + content_management_app.update_body(slug="plants", body="bluebell rose jasmine") + content_management_app.update_body(slug="minerals", body="iron zinc calcium") + + # Search for single words. 
+ page_ids = search_index_app.search("cat") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "animals") + self.assertEqual(page["body"], "cat dog zebra") + + page_ids = search_index_app.search("rose") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "plants") + self.assertEqual(page["body"], "bluebell rose jasmine") + + page_ids = search_index_app.search("calcium") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "minerals") + self.assertEqual(page["body"], "iron zinc calcium") + + page_ids = search_index_app.search("dog") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "animals") + self.assertEqual(page["body"], "cat dog zebra") + + page_ids = search_index_app.search("bluebell") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "plants") + self.assertEqual(page["body"], "bluebell rose jasmine") + + page_ids = search_index_app.search("zinc") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "minerals") + self.assertEqual(page["body"], "iron zinc calcium") + + # Search for multiple words in same page. + page_ids = search_index_app.search("dog cat") + self.assertEqual(1, len(page_ids)) + page = content_management_app.get_page_by_id(page_ids[0]) + self.assertEqual(page["slug"], "animals") + self.assertEqual(page["body"], "cat dog zebra") + + # Search for multiple words in same page, expect no results. + page_ids = search_index_app.search("rose zebra") + self.assertEqual(0, len(page_ids)) + + # Search for alternative words, expect two results. 
+ page_ids = search_index_app.search("rose OR zebra") + pages = [content_management_app.get_page_by_id(page_id) for page_id in page_ids] + self.assertEqual(2, len(pages)) + self.assertEqual(["animals", "plants"], sorted(p["slug"] for p in pages)) + + +class TestWithSQLite(ContentManagementSystemTestCase): + env = { + "PERSISTENCE_MODULE": "eventsourcing.examples.contentmanagementsystem.sqlite", + "SQLITE_DBNAME": ":memory:", + } + + +class TestWithPostgres(ContentManagementSystemTestCase): + env = { + "PERSISTENCE_MODULE": "eventsourcing.examples.contentmanagementsystem.postgres", + "POSTGRES_DBNAME": "eventsourcing", + "POSTGRES_HOST": "127.0.0.1", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "eventsourcing", + "POSTGRES_PASSWORD": "eventsourcing", + } + + def setUp(self) -> None: + super().setUp() + self.drop_tables() + + def tearDown(self) -> None: + self.drop_tables() + super().tearDown() + + def drop_tables(self) -> None: + db = PostgresDatastore( + self.env["POSTGRES_DBNAME"], + self.env["POSTGRES_HOST"], + self.env["POSTGRES_PORT"], + self.env["POSTGRES_USER"], + self.env["POSTGRES_PASSWORD"], + ) + drop_postgres_table(db, "public.contentmanagementapplication_events") + drop_postgres_table(db, "public.pages_projection_example") + drop_postgres_table(db, "public.searchindexapplication_events") + drop_postgres_table(db, "public.searchindexapplication_tracking") + db.close() + + +del ContentManagementSystemTestCase diff --git a/eventsourcing/examples/searchablecontent/application.py b/eventsourcing/examples/searchablecontent/application.py index 9c2473eb..bce0d624 100644 --- a/eventsourcing/examples/searchablecontent/application.py +++ b/eventsourcing/examples/searchablecontent/application.py @@ -1,4 +1,5 @@ -from typing import Any, Dict, List, Optional, Union, cast +from typing import Any, List, Optional, Tuple, Union, cast +from uuid import UUID from eventsourcing.domain import DomainEventProtocol, MutableOrImmutableAggregate from 
eventsourcing.examples.contentmanagement.application import ( @@ -18,22 +19,22 @@ def save( *objs: Optional[Union[MutableOrImmutableAggregate, DomainEventProtocol]], **kwargs: Any, ) -> List[Recording]: - insert_page_bodies: Dict[str, str] = {} - update_page_bodies: Dict[str, str] = {} + insert_pages: List[Tuple[UUID, str, str, str]] = [] + update_pages: List[Tuple[UUID, str, str, str]] = [] for obj in objs: if isinstance(obj, Page): if obj.version == len(obj.pending_events): - insert_page_bodies[obj.slug] = obj.body + insert_pages.append((obj.id, obj.slug, obj.title, obj.body)) else: - update_page_bodies[obj.slug] = obj.body - kwargs["insert_page_bodies"] = insert_page_bodies - kwargs["update_page_bodies"] = update_page_bodies + update_pages.append((obj.id, obj.slug, obj.title, obj.body)) + kwargs["insert_pages"] = insert_pages + kwargs["update_pages"] = update_pages return super().save(*objs, **kwargs) def search(self, query: str) -> List[PageDetailsType]: pages = [] recorder = cast(SearchableContentRecorder, self.recorder) - for slug in recorder.search_page_bodies(query): - page = self.get_page_details(slug) + for page_id in recorder.search_pages(query): + page = self.get_page_by_id(page_id) pages.append(page) return pages diff --git a/eventsourcing/examples/searchablecontent/persistence.py b/eventsourcing/examples/searchablecontent/persistence.py index 2e951251..ade65273 100644 --- a/eventsourcing/examples/searchablecontent/persistence.py +++ b/eventsourcing/examples/searchablecontent/persistence.py @@ -1,12 +1,19 @@ from abc import abstractmethod -from typing import List +from typing import List, Tuple +from uuid import UUID -from eventsourcing.persistence import ApplicationRecorder +from eventsourcing.persistence import AggregateRecorder -class SearchableContentRecorder(ApplicationRecorder): +class SearchableContentRecorder(AggregateRecorder): @abstractmethod - def search_page_bodies(self, query: str) -> List[str]: + def search_pages(self, query: str) -> 
List[UUID]: """ - Returns page slugs for page bodies that match query. + Returns IDs for pages that match query. + """ + + @abstractmethod + def select_page(self, page_id: UUID) -> Tuple[str, str, str]: + """ + Returns slug, title and body for given ID. """ diff --git a/eventsourcing/examples/searchablecontent/postgres.py b/eventsourcing/examples/searchablecontent/postgres.py index 26659cda..5a742fc9 100644 --- a/eventsourcing/examples/searchablecontent/postgres.py +++ b/eventsourcing/examples/searchablecontent/postgres.py @@ -1,77 +1,70 @@ -from typing import Any, Dict, List, Optional, Sequence, cast +from typing import Any, List, Optional, Sequence, Tuple +from uuid import UUID +from eventsourcing.examples.contentmanagement.application import PageNotFound from eventsourcing.examples.searchablecontent.persistence import ( SearchableContentRecorder, ) -from eventsourcing.persistence import ApplicationRecorder, StoredEvent +from eventsourcing.persistence import StoredEvent from eventsourcing.postgres import ( Factory, + PostgresAggregateRecorder, PostgresApplicationRecorder, PostgresConnection, PostgresCursor, - PostgresDatastore, ) -class SearchableContentApplicationRecorder( - SearchableContentRecorder, PostgresApplicationRecorder +class PostgresSearchableContentRecorder( + SearchableContentRecorder, + PostgresAggregateRecorder, ): - def __init__( - self, - datastore: PostgresDatastore, - events_table_name: str = "stored_events", - page_bodies_table_name: str = "page_bodies", - ): - self.check_table_name_length(page_bodies_table_name, datastore.schema) - self.page_bodies_table_name = page_bodies_table_name - super().__init__(datastore, events_table_name) - self.insert_page_body_statement = ( - f"INSERT INTO {self.page_bodies_table_name} VALUES ($1, $2)" - ) - self.insert_page_body_statement_name = ( - f"insert_{page_bodies_table_name}".replace(".", "_") - ) - self.update_page_body_statement = ( - f"UPDATE {self.page_bodies_table_name} " - f"SET page_body = $1 
WHERE page_slug = $2" - ) - self.update_page_body_statement_name = ( - f"update_{page_bodies_table_name}".replace(".", "_") - ) - self.search_page_body_statement = ( - f"SELECT page_slug FROM {self.page_bodies_table_name} WHERE " - f"to_tsvector('english', page_body) @@ websearch_to_tsquery('english', $1)" - ) + pages_table_name = "pages_projection_example" + select_page_statement = ( + f"SELECT page_slug, page_title, page_body FROM {pages_table_name}" + f" WHERE page_id = $1" + ) - self.search_page_body_statement_name = ( - f"search_{page_bodies_table_name}".replace(".", "_") - ) + select_page_statement_name = f"select_{pages_table_name}".replace(".", "_") + + insert_page_statement = f"INSERT INTO {pages_table_name} VALUES ($1, $2, $3, $4)" + insert_page_statement_name = f"insert_{pages_table_name}".replace(".", "_") + + update_page_statement = ( + f"UPDATE {pages_table_name} " + f"SET page_slug = $1, page_title = $2, page_body = $3 WHERE page_id = $4" + ) + update_page_statement_name = f"update_{pages_table_name}".replace(".", "_") + + search_pages_statement = ( + f"SELECT page_id FROM {pages_table_name} WHERE " + f"to_tsvector('english', page_body) @@ websearch_to_tsquery('english', $1)" + ) + search_pages_statement_name = f"search_{pages_table_name}".replace(".", "_") def construct_create_table_statements(self) -> List[str]: statements = super().construct_create_table_statements() statements.append( "CREATE TABLE IF NOT EXISTS " - f"{self.page_bodies_table_name} (" + f"{self.pages_table_name} (" + "page_id uuid, " "page_slug text, " + "page_title text, " "page_body text, " "PRIMARY KEY " - "(page_slug))" + "(page_id))" ) statements.append( - f"CREATE INDEX IF NOT EXISTS {self.page_bodies_table_name}_idx " - f"ON {self.page_bodies_table_name} " + f"CREATE INDEX IF NOT EXISTS {self.pages_table_name}_idx " + f"ON {self.pages_table_name} " f"USING GIN (to_tsvector('english', page_body))" ) return statements def _prepare_insert_events(self, conn: PostgresConnection) 
-> None: super()._prepare_insert_events(conn) - self._prepare( - conn, self.insert_page_body_statement_name, self.insert_page_body_statement - ) - self._prepare( - conn, self.update_page_body_statement_name, self.update_page_body_statement - ) + self._prepare(conn, self.insert_page_statement_name, self.insert_page_statement) + self._prepare(conn, self.update_page_statement_name, self.update_page_statement) def _insert_events( self, @@ -80,71 +73,94 @@ def _insert_events( **kwargs: Any, ) -> Optional[Sequence[int]]: notification_ids = super()._insert_events(c, stored_events, **kwargs) + self._insert_pages(c, **kwargs) + self._update_pages(c, **kwargs) + return notification_ids - # Insert page bodies. - insert_page_bodies = cast(Dict[str, str], kwargs.get("insert_page_bodies")) - if insert_page_bodies: - for page_slug, page_body in insert_page_bodies.items(): - statement_alias = self.statement_name_aliases[ - self.insert_page_body_statement_name - ] - c.execute( - f"EXECUTE {statement_alias}(%s, %s)", - ( - page_slug, - page_body, - ), - ) - - # Update page bodies. 
- update_page_bodies = cast(Dict[str, str], kwargs.get("update_page_bodies")) - if update_page_bodies: - for page_slug, page_body in update_page_bodies.items(): + def _insert_pages( + self, + c: PostgresCursor, + insert_pages: Sequence[Tuple[UUID, str, str, str]] = (), + **_: Any, + ) -> None: + for page_id, page_slug, page_title, page_body in insert_pages: + statement_alias = self.statement_name_aliases[ + self.insert_page_statement_name + ] + c.execute( + f"EXECUTE {statement_alias}(%s, %s, %s, %s)", + ( + page_id, + page_slug, + page_title, + page_body, + ), + ) + + def _update_pages( + self, + c: PostgresCursor, + update_pages: Sequence[Tuple[UUID, str, str, str]] = (), + **_: Any, + ) -> None: + for page_id, page_slug, page_title, page_body in update_pages: + statement_alias = self.statement_name_aliases[ + self.update_page_statement_name + ] + c.execute( + f"EXECUTE {statement_alias}(%s, %s, %s, %s)", + ( + page_slug, + page_title, + page_body, + page_id, + ), + ) + + def search_pages(self, query: str) -> List[UUID]: + page_ids = [] + + with self.datastore.get_connection() as conn: + self._prepare( + conn, + self.search_pages_statement_name, + self.search_pages_statement, + ) + with conn.transaction(commit=False) as curs: statement_alias = self.statement_name_aliases[ - self.update_page_body_statement_name + self.search_pages_statement_name ] - c.execute( - f"EXECUTE {statement_alias}(%s, %s)", - ( - page_body, - page_slug, - ), - ) - return notification_ids + curs.execute(f"EXECUTE {statement_alias}(%s)", [query]) + for row in curs.fetchall(): + page_ids.append(row["page_id"]) - def search_page_bodies(self, query: str) -> List[str]: - page_slugs = [] + return page_ids + def select_page(self, page_id: UUID) -> Tuple[str, str, str]: with self.datastore.get_connection() as conn: self._prepare( conn, - self.search_page_body_statement_name, - self.search_page_body_statement, + self.select_page_statement_name, + self.select_page_statement, ) with 
conn.transaction(commit=False) as curs: statement_alias = self.statement_name_aliases[ - self.search_page_body_statement_name + self.select_page_statement_name ] - curs.execute(f"EXECUTE {statement_alias}(%s)", [query]) + curs.execute(f"EXECUTE {statement_alias}(%s)", [str(page_id)]) for row in curs.fetchall(): - page_slugs.append(row["page_slug"]) + return row["page_slug"], row["page_title"], row["page_body"] + raise PageNotFound(f"Page ID {page_id} not found") - return page_slugs + +class SearchableContentApplicationRecorder( + PostgresSearchableContentRecorder, PostgresApplicationRecorder +): + pass class SearchableContentInfrastructureFactory(Factory): - def application_recorder(self) -> ApplicationRecorder: - prefix = (self.datastore.schema + ".") if self.datastore.schema else "" - prefix += self.env.name.lower() or "stored" - events_table_name = prefix + "_events" - page_bodies_table_name = prefix + "_page_bodies" - recorder = SearchableContentApplicationRecorder( - datastore=self.datastore, - events_table_name=events_table_name, - page_bodies_table_name=page_bodies_table_name, - ) - recorder.create_table() - return recorder + application_recorder_class = SearchableContentApplicationRecorder del Factory diff --git a/eventsourcing/examples/searchablecontent/sqlite.py b/eventsourcing/examples/searchablecontent/sqlite.py index eca390d3..68c1947f 100644 --- a/eventsourcing/examples/searchablecontent/sqlite.py +++ b/eventsourcing/examples/searchablecontent/sqlite.py @@ -1,73 +1,71 @@ -from typing import Any, Dict, List, Optional, Sequence, cast +from typing import Any, List, Optional, Sequence, Tuple +from uuid import UUID +from eventsourcing.examples.contentmanagement.application import PageNotFound from eventsourcing.examples.searchablecontent.persistence import ( SearchableContentRecorder, ) -from eventsourcing.persistence import ApplicationRecorder, StoredEvent +from eventsourcing.persistence import StoredEvent from eventsourcing.sqlite import ( Factory, + 
SQLiteAggregateRecorder, SQLiteApplicationRecorder, SQLiteCursor, - SQLiteDatastore, ) -class SearchableContentApplicationRecorder( - SearchableContentRecorder, SQLiteApplicationRecorder +class SQLiteSearchableContentRecorder( + SearchableContentRecorder, SQLiteAggregateRecorder ): - def __init__( - self, - datastore: SQLiteDatastore, - events_table_name: str = "stored_events", - page_bodies_table_name: str = "page_bodies", - ): - self.page_bodies_table_name = page_bodies_table_name - self.page_bodies_virtual_table_name = page_bodies_table_name + "_fts" - super().__init__(datastore, events_table_name) - self.insert_page_body_statement = ( - f"INSERT INTO {self.page_bodies_table_name} VALUES (?, ?)" - ) - self.update_page_body_statement = ( - f"UPDATE {self.page_bodies_table_name} " - f"SET page_body = ? WHERE page_slug = ?" - ) - self.search_page_body_statement = ( - f"SELECT page_slug FROM {self.page_bodies_virtual_table_name} WHERE " - f"page_body MATCH $1" - ) + pages_table_name = "pages_projection_example" + pages_virtual_table_name = pages_table_name + "_fts" + select_page_statement = ( + f"SELECT page_slug, page_title, page_body FROM " + f"{pages_table_name} WHERE page_id = ?" + ) + insert_page_statement = f"INSERT INTO {pages_table_name} VALUES (?, ?, ?, ?)" + update_page_statement = ( + f"UPDATE {pages_table_name} " + f"SET page_slug = ?, page_title = ?, page_body = ? WHERE page_id = ?" + ) + search_pages_statement = ( + f"SELECT page_id FROM {pages_virtual_table_name} WHERE " f"page_body MATCH ?" 
+ ) def construct_create_table_statements(self) -> List[str]: statements = super().construct_create_table_statements() statements.append( "CREATE TABLE IF NOT EXISTS " - f"{self.page_bodies_table_name} (" - "page_slug text, " - "page_body text, " + f"{self.pages_table_name} (" + "page_id TEXT, " + "page_slug TEXT, " + "page_title TEXT, " + "page_body TEXT, " "PRIMARY KEY " - "(page_slug)) " + "(page_id)) " ) statements.append( - f"CREATE VIRTUAL TABLE {self.page_bodies_virtual_table_name} USING fts5(" - f"page_slug, page_body, content='{self.page_bodies_table_name}')" + f"CREATE VIRTUAL TABLE {self.pages_virtual_table_name} USING fts5(" + f"page_id, page_body, content='{self.pages_table_name}')" ) statements.append( - f"CREATE TRIGGER page_bodies_ai AFTER INSERT ON " - f"{self.page_bodies_table_name} BEGIN " - f"INSERT INTO {self.page_bodies_virtual_table_name} " - f"(rowid, page_slug, page_body) " - f"VALUES (new.rowid, new.page_slug, new.page_body); " + f"CREATE TRIGGER projection_ai AFTER INSERT ON " + f"{self.pages_table_name} BEGIN " + f"INSERT INTO {self.pages_virtual_table_name} " + f"(rowid, page_id, page_body) " + f"VALUES (new.rowid, new.page_id, new.page_body); " f"END" ) statements.append( - f"CREATE TRIGGER page_bodies_au AFTER UPDATE ON " - f"{self.page_bodies_table_name} " + f"CREATE TRIGGER projection_au AFTER UPDATE ON " + f"{self.pages_table_name} " f"BEGIN " - f"INSERT INTO {self.page_bodies_virtual_table_name} " - f"({self.page_bodies_virtual_table_name}, rowid, page_slug, page_body) " - f"VALUES ('delete', old.rowid, old.page_slug, old.page_body);" - f"INSERT INTO {self.page_bodies_virtual_table_name} " - f"(rowid, page_slug, page_body) " - f"VALUES (new.rowid, new.page_slug, new.page_body); " + f"INSERT INTO {self.pages_virtual_table_name} " + f"({self.pages_virtual_table_name}, rowid, page_id, page_body) " + f"VALUES ('delete', old.rowid, old.page_id, old.page_body);" + f"INSERT INTO {self.pages_virtual_table_name} " + f"(rowid, page_id, 
page_body) " + f"VALUES (new.rowid, new.page_id, new.page_body); " f"END" ) return statements @@ -79,36 +77,60 @@ def _insert_events( **kwargs: Any, ) -> Optional[Sequence[int]]: notification_ids = super()._insert_events(c, stored_events, **kwargs) - - # Insert page bodies. - insert_page_bodies = cast(Dict[str, str], kwargs.get("insert_page_bodies")) - if insert_page_bodies: - for page_slug, page_body in insert_page_bodies.items(): - c.execute(self.insert_page_body_statement, (page_slug, page_body)) - - # Update page bodies. - update_page_bodies = cast(Dict[str, str], kwargs.get("update_page_bodies")) - if update_page_bodies: - for page_slug, page_body in update_page_bodies.items(): - c.execute(self.update_page_body_statement, (page_body, page_slug)) + self._insert_pages(c, **kwargs) + self._update_pages(c, **kwargs) return notification_ids - def search_page_bodies(self, query: str) -> List[str]: + def _insert_pages( + self, + c: SQLiteCursor, + insert_pages: Sequence[Tuple[UUID, str, str, str]] = (), + **_: Any, + ) -> None: + for page_id, page_slug, page_title, page_body in insert_pages: + c.execute( + self.insert_page_statement, + (str(page_id), page_slug, page_title, page_body), + ) + + def _update_pages( + self, + c: SQLiteCursor, + update_pages: Sequence[Tuple[UUID, str, str, str]] = (), + **_: Any, + ) -> None: + for page_id, page_slug, page_title, page_body in update_pages: + c.execute( + self.update_page_statement, + (page_slug, page_title, page_body, str(page_id)), + ) + + def search_pages(self, query: str) -> List[UUID]: page_slugs = [] with self.datastore.transaction(commit=False) as c: - c.execute(self.search_page_body_statement, [query]) + c.execute(self.search_pages_statement, [query]) for row in c.fetchall(): - page_slugs.append(row["page_slug"]) + page_slugs.append(UUID(row["page_id"])) return page_slugs + def select_page(self, page_id: UUID) -> Tuple[str, str, str]: + with self.datastore.transaction(commit=False) as c: + 
c.execute(self.select_page_statement, [str(page_id)]) + for row in c.fetchall(): + return row["page_slug"], row["page_title"], row["page_body"] + raise PageNotFound(f"Page ID {page_id} not found") + + +class SearchableContentApplicationRecorder( + SQLiteSearchableContentRecorder, SQLiteApplicationRecorder +): + pass + class SearchableContentInfrastructureFactory(Factory): - def application_recorder(self) -> ApplicationRecorder: - recorder = SearchableContentApplicationRecorder(datastore=self.datastore) - recorder.create_table() - return recorder + application_recorder_class = SearchableContentApplicationRecorder del Factory diff --git a/eventsourcing/examples/searchablecontent/test_searchablecontent.py b/eventsourcing/examples/searchablecontent/test_application.py similarity index 92% rename from eventsourcing/examples/searchablecontent/test_searchablecontent.py rename to eventsourcing/examples/searchablecontent/test_application.py index ebc669b4..95680c54 100644 --- a/eventsourcing/examples/searchablecontent/test_searchablecontent.py +++ b/eventsourcing/examples/searchablecontent/test_application.py @@ -11,7 +11,7 @@ from eventsourcing.tests.postgres_utils import drop_postgres_table -class SearchableContentTestCase(TestCase): +class SearchableContentApplicationTestCase(TestCase): env: Dict[str, str] = {} def test_app(self) -> None: @@ -68,14 +68,14 @@ def test_app(self) -> None: self.assertEqual(["animals", "plants"], sorted(p["slug"] for p in pages)) -class TestWithSQLite(SearchableContentTestCase): +class TestWithSQLite(SearchableContentApplicationTestCase): env = { "PERSISTENCE_MODULE": "eventsourcing.examples.searchablecontent.sqlite", "SQLITE_DBNAME": ":memory:", } -class TestWithPostgres(SearchableContentTestCase): +class TestWithPostgres(SearchableContentApplicationTestCase): env = {"PERSISTENCE_MODULE": "eventsourcing.examples.searchablecontent.postgres"} def setUp(self) -> None: @@ -100,8 +100,8 @@ def drop_tables(self) -> None: 
os.environ["POSTGRES_PASSWORD"], ) drop_postgres_table(db, "public.searchablecontentapplication_events") - drop_postgres_table(db, "public.searchablecontentapplication_page_bodies") + drop_postgres_table(db, "public.pages_projection_example") db.close() -del SearchableContentTestCase +del SearchableContentApplicationTestCase diff --git a/eventsourcing/examples/searchablecontent/test_recorder.py b/eventsourcing/examples/searchablecontent/test_recorder.py new file mode 100644 index 00000000..fef3f378 --- /dev/null +++ b/eventsourcing/examples/searchablecontent/test_recorder.py @@ -0,0 +1,65 @@ +import os +from typing import Dict, cast +from unittest import TestCase +from uuid import uuid4 + +from eventsourcing.examples.contentmanagement.application import PageNotFound +from eventsourcing.examples.searchablecontent.application import ( + SearchableContentApplication, +) +from eventsourcing.examples.searchablecontent.persistence import ( + SearchableContentRecorder, +) +from eventsourcing.postgres import PostgresDatastore +from eventsourcing.tests.postgres_utils import drop_postgres_table + + +class SearchableContentRecorderTestCase(TestCase): + env: Dict[str, str] = {} + + def test_recorder(self) -> None: + # Just need to cover the case where select_page() raises PageNotFound. 
+ app = SearchableContentApplication(env=self.env) + + recorder = cast(SearchableContentRecorder, app.recorder) + with self.assertRaises(PageNotFound): + recorder.select_page(uuid4()) + + +class TestWithSQLite(SearchableContentRecorderTestCase): + env = { + "PERSISTENCE_MODULE": "eventsourcing.examples.searchablecontent.sqlite", + "SQLITE_DBNAME": ":memory:", + } + + +class TestWithPostgres(SearchableContentRecorderTestCase): + env = {"PERSISTENCE_MODULE": "eventsourcing.examples.searchablecontent.postgres"} + + def setUp(self) -> None: + super().setUp() + os.environ["POSTGRES_DBNAME"] = "eventsourcing" + os.environ["POSTGRES_HOST"] = "127.0.0.1" + os.environ["POSTGRES_PORT"] = "5432" + os.environ["POSTGRES_USER"] = "eventsourcing" + os.environ["POSTGRES_PASSWORD"] = "eventsourcing" + self.drop_tables() + + def tearDown(self) -> None: + self.drop_tables() + super().tearDown() + + def drop_tables(self) -> None: + db = PostgresDatastore( + os.environ["POSTGRES_DBNAME"], + os.environ["POSTGRES_HOST"], + os.environ["POSTGRES_PORT"], + os.environ["POSTGRES_USER"], + os.environ["POSTGRES_PASSWORD"], + ) + drop_postgres_table(db, "public.searchablecontentapplication_events") + drop_postgres_table(db, "public.pages_projection_example") + db.close() + + +del SearchableContentRecorderTestCase diff --git a/eventsourcing/postgres.py b/eventsourcing/postgres.py index f80f91b7..d6f6ce29 100644 --- a/eventsourcing/postgres.py +++ b/eventsourcing/postgres.py @@ -785,6 +785,10 @@ class Factory(InfrastructureFactory): POSTGRES_SCHEMA = "POSTGRES_SCHEMA" CREATE_TABLE = "CREATE_TABLE" + aggregate_recorder_class = PostgresAggregateRecorder + application_recorder_class = PostgresApplicationRecorder + process_recorder_class = PostgresProcessRecorder + def __init__(self, env: Environment): super().__init__(env) dbname = self.env.get(self.POSTGRES_DBNAME) @@ -960,7 +964,7 @@ def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder: events_table_name = prefix + "_" + 
purpose if self.datastore.schema: events_table_name = f"{self.datastore.schema}.{events_table_name}" - recorder = PostgresAggregateRecorder( + recorder = type(self).aggregate_recorder_class( datastore=self.datastore, events_table_name=events_table_name, ) @@ -973,7 +977,7 @@ def application_recorder(self) -> ApplicationRecorder: events_table_name = prefix + "_events" if self.datastore.schema: events_table_name = f"{self.datastore.schema}.{events_table_name}" - recorder = PostgresApplicationRecorder( + recorder = type(self).application_recorder_class( datastore=self.datastore, events_table_name=events_table_name, ) @@ -989,7 +993,7 @@ def process_recorder(self) -> ProcessRecorder: if self.datastore.schema: events_table_name = f"{self.datastore.schema}.{events_table_name}" tracking_table_name = f"{self.datastore.schema}.{tracking_table_name}" - recorder = PostgresProcessRecorder( + recorder = type(self).process_recorder_class( datastore=self.datastore, events_table_name=events_table_name, tracking_table_name=tracking_table_name, diff --git a/eventsourcing/sqlite.py b/eventsourcing/sqlite.py index 8295477a..75781f2d 100644 --- a/eventsourcing/sqlite.py +++ b/eventsourcing/sqlite.py @@ -512,6 +512,10 @@ class Factory(InfrastructureFactory): SQLITE_LOCK_TIMEOUT = "SQLITE_LOCK_TIMEOUT" CREATE_TABLE = "CREATE_TABLE" + aggregate_recorder_class = SQLiteAggregateRecorder + application_recorder_class = SQLiteApplicationRecorder + process_recorder_class = SQLiteProcessRecorder + def __init__(self, env: Environment): super().__init__(env) db_name = self.env.get(self.SQLITE_DBNAME) @@ -542,7 +546,7 @@ def __init__(self, env: Environment): def aggregate_recorder(self, purpose: str = "events") -> AggregateRecorder: events_table_name = "stored_" + purpose - recorder = SQLiteAggregateRecorder( + recorder = self.aggregate_recorder_class( datastore=self.datastore, events_table_name=events_table_name, ) @@ -551,13 +555,13 @@ def aggregate_recorder(self, purpose: str = "events") -> 
AggregateRecorder: return recorder def application_recorder(self) -> ApplicationRecorder: - recorder = SQLiteApplicationRecorder(datastore=self.datastore) + recorder = self.application_recorder_class(datastore=self.datastore) if self.env_create_table(): recorder.create_table() return recorder def process_recorder(self) -> ProcessRecorder: - recorder = SQLiteProcessRecorder(datastore=self.datastore) + recorder = self.process_recorder_class(datastore=self.datastore) if self.env_create_table(): recorder.create_table() return recorder From 6ff193ae8c4605113d7e680482a0131cd230004d Mon Sep 17 00:00:00 2001 From: johnbywater Date: Mon, 16 Oct 2023 19:28:00 +0100 Subject: [PATCH 005/107] Fixed wording in "content management system" example. --- docs/topics/examples/content-management-system.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/topics/examples/content-management-system.rst b/docs/topics/examples/content-management-system.rst index e9797e0d..c6f4abf1 100644 --- a/docs/topics/examples/content-management-system.rst +++ b/docs/topics/examples/content-management-system.rst @@ -25,14 +25,14 @@ Its ``policy()`` function is coded to process the ``Page.Created`` and ``Page.Bo events of the ``ContentManagementApplication``. It also has a ``search()`` method that returns a list of page IDs. -It that works in a similar way to the ``SearchableContentApplication`` class in -:doc:`/topics/examples/searchable-content`, by setting variable keyword arguments -``insert_pages`` and ``update_pages``. However, rather than populating variable -keyword arguments in the ``save()`` method, it populates ``insert_pages`` and ``update_pages`` -within its ``policy()`` function. 
The ``insert_pages`` and ``update_pages`` arguments are set
+The ``SearchIndexApplication`` class in this example works in a similar way to the ``SearchableContentApplication``
+class in :doc:`/topics/examples/searchable-content`, by setting variable keyword arguments
+``insert_pages`` and ``update_pages`` on the :class:`~eventsourcing.application.ProcessingEvent` object.
+However, rather than populating the variable keyword arguments in the ``save()`` method, it populates ``insert_pages``
+and ``update_pages`` within its ``policy()`` function. The ``insert_pages`` and ``update_pages`` arguments are set
 on the :class:`~eventsourcing.application.ProcessingEvent` object passed into the ``policy()``
-function, which carries an event notification ID that indicates the position
-in the application sequence of the domain event that is being processed.
+function, which carries an event notification ID that indicates the position in the application sequence of
+the domain event that is being processed.
 
 The application will be configured to run with a custom :class:`~eventsourcing.persistence.ProcessRecorder`
 so that search index records will be updated atomically with the inserting of a tracking record which

From 06c2ed0ed566b09d11e55f1c473518833626227d Mon Sep 17 00:00:00 2001
From: johnbywater
Date: Mon, 16 Oct 2023 19:28:26 +0100
Subject: [PATCH 006/107] Expanded build matrix to include Python 3.12.
--- .github/workflows/runtests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/runtests.yaml b/.github/workflows/runtests.yaml index 6d960ec3..070ac14c 100644 --- a/.github/workflows/runtests.yaml +++ b/.github/workflows/runtests.yaml @@ -10,7 +10,7 @@ jobs: max-parallel: 20 fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] postgres-version: ["12", "13", "14", "15"] # Service containers to run with `runner-job` From 80885efe2fe3a0ca472def06446f8f064f4dbaaf Mon Sep 17 00:00:00 2001 From: johnbywater Date: Mon, 16 Oct 2023 20:12:12 +0100 Subject: [PATCH 007/107] Added trove classifier for Python 3.12. --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index f1c1e4cb..57033dd6 100644 --- a/setup.py +++ b/setup.py @@ -88,6 +88,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Python Modules", ], From bbd0f3fa80d85e85536c9610b60d7cc15015f468 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:47:17 +0100 Subject: [PATCH 008/107] Moved dependencies on orjson and pydantic to "docs" extra. --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 57033dd6..dcaf0792 100644 --- a/setup.py +++ b/setup.py @@ -12,12 +12,12 @@ + [ "Sphinx==4.2.0", "sphinx_rtd_theme==1.3.0", + "orjson", + "pydantic", ] ) dev_requires = docs_requires + [ - "orjson", - "pydantic", "python-coveralls", "coverage", "black", From 3cb6e379a8db139ced78283f7045f4834e508f56 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:48:03 +0100 Subject: [PATCH 009/107] Adjusted docstrings of runner classes in system.py. 
--- eventsourcing/system.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/eventsourcing/system.py b/eventsourcing/system.py index 0a596223..01a22ad4 100644 --- a/eventsourcing/system.py +++ b/eventsourcing/system.py @@ -465,13 +465,10 @@ def __init__(self, system: System, env: Optional[EnvType] = None): def start(self) -> None: """ - Starts the runner. - The applications are constructed, and setup to lead and follow - each other, according to the system definition. - The followers are setup to follow the applications they follow - (have a notification log reader with the notification log of the - leader), and their leaders are setup to lead the runner itself - (send prompts). + Starts the runner. The applications mentioned in the system definition + are constructed. The followers are set up to follow the applications + they are defined as following in the system definition. And the leaders + are set up to lead the runner itself. """ super().start() @@ -486,7 +483,7 @@ def start(self) -> None: assert isinstance(follower, Follower) follower.follow(leader_name, leader.notification_log) - # Setup leaders to notify followers. + # Setup leaders to lead this runner. for name in self.system.leaders: leader = cast(Leader, self.apps[name]) assert isinstance(leader, Leader) @@ -494,12 +491,15 @@ def start(self) -> None: def receive_recording_event(self, recording_event: RecordingEvent) -> None: """ - Receives recording event by appending it to list of received recording - events. - - Unless this method has previously been called and not yet returned, it - will then attempt to make the followers process all received recording - events, until there are none remaining. + Receives recording event by appending the name of the leader + to a list of prompted names. 
+
+        Then, unless this method has previously been called and not yet returned,
+        each of the prompted names is resolved to a leader application, and its
+        followers pull and process events from that application. This may lead to
+        further names being added to the list of prompted names. This process
+        continues until there are no more prompted names. In this way, a system
+        of applications will process all events in a single thread.
         """
         leader_name = recording_event.application_name
         with self._prompted_names_lock:

From 4620f81ce8320b45f5249c7928000e1efaf314f1 Mon Sep 17 00:00:00 2001
From: johnbywater
Date: Wed, 18 Oct 2023 12:48:53 +0100
Subject: [PATCH 010/107] Adjusted TestDocs to drop tables after testing each doc.

---
 eventsourcing/tests/docs_tests/test_docs.py | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/eventsourcing/tests/docs_tests/test_docs.py b/eventsourcing/tests/docs_tests/test_docs.py
index 6a110005..8fdb23f4 100644
--- a/eventsourcing/tests/docs_tests/test_docs.py
+++ b/eventsourcing/tests/docs_tests/test_docs.py
@@ -26,11 +26,24 @@ def setUp(self) -> None:
             "eventsourcing",
         )
         drop_postgres_table(db, "dogschool_events")
+        drop_postgres_table(db, "counters_events")
+        drop_postgres_table(db, "counters_tracking")
 
     def tearDown(self) -> None:
         self.clean_env()
 
     def clean_env(self):
+        db = PostgresDatastore(
+            "eventsourcing",
+            "127.0.0.1",
+            "5432",
+            "eventsourcing",
+            "eventsourcing",
+        )
+        drop_postgres_table(db, "dogschool_events")
+        drop_postgres_table(db, "counters_events")
+        drop_postgres_table(db, "counters_tracking")
+
         keys = [
             "PERSISTENCE_MODULE",
             "IS_SNAPSHOTTING_ENABLED",
@@ -89,7 +102,7 @@ def test_docs(self):
         for name in filenames:
             if name in skipped:
                 continue
-            if name.endswith(".rst"):
+            # if name.endswith(".rst"):
                 # if (
                 #     name.endswith("persistence.rst")
                 #     or name.endswith("domain.rst")
                 #     or name.endswith("application.rst")
                 #     or name.endswith("system.rst")
                 #     or name.endswith("examples.rst")
                 # 
): - # if name.endswith('quick_start.rst'): + if name.endswith('part4.rst'): # if name.endswith('aggregates_in_ddd.rst'): # if name.endswith('example_application.rst'): # if name.endswith('everything.rst'): From 90a9e863204a6ecd2b0467e736f4ec7bb9251d23 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:49:57 +0100 Subject: [PATCH 011/107] Slightly adjusted wording in application.rst. --- docs/topics/application.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/topics/application.rst b/docs/topics/application.rst index 093080f6..48b22f24 100644 --- a/docs/topics/application.rst +++ b/docs/topics/application.rst @@ -86,10 +86,10 @@ domain event notifications. The :class:`~eventsourcing.application.Application` class defines an object method :func:`~eventsourcing.application.Application.take_snapshot` which can be used for :ref:`snapshotting ` existing aggregates. Snapshotting -isn't necessary, but can help to reduce the time it takes to access aggregates with -lots of domain events. +isn't necessary, but can help to reduce the time it takes to access aggregates that +would otherwise be reconstructed from a large number of recorded domain events. -The :class:`~eventsourcing.application.Application` class has an ``env`` attribute +The :class:`~eventsourcing.application.Application` class has an ``env`` attribute, which can be redefined on your application classes. Application objects also have an ``env`` attribute which is determined by a combination of the application class attribute, the operating system environment, and by an optional constructor argument. From 13b3bbc34d5cff6325fc1b9c44f81ca5cbff137b Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:50:25 +0100 Subject: [PATCH 012/107] Added anchor refs to examples.rst. 
--- docs/topics/examples.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/topics/examples.rst b/docs/topics/examples.rst index 188b28d9..16e16027 100644 --- a/docs/topics/examples.rst +++ b/docs/topics/examples.rst @@ -30,6 +30,7 @@ involves creating and updating a ``Dog`` aggregate, and taking a snapshot. examples/aggregate7 examples/aggregate8 +.. _Example applications: Example applications ==================== @@ -43,6 +44,8 @@ Example applications examples/searchable-timestamps examples/searchable-content +.. _Example systems: + Example systems =============== From 7444d04ba6e505d4f743ab37babe1acb186a34af Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:51:38 +0100 Subject: [PATCH 013/107] Added Tutorial Part 4 (about systems). --- docs/topics/tutorial.rst | 1 + docs/topics/tutorial/part4.rst | 364 +++++++++++++++++++++++++++++++++ 2 files changed, 365 insertions(+) create mode 100644 docs/topics/tutorial/part4.rst diff --git a/docs/topics/tutorial.rst b/docs/topics/tutorial.rst index 189492fd..2c4da589 100644 --- a/docs/topics/tutorial.rst +++ b/docs/topics/tutorial.rst @@ -16,3 +16,4 @@ documentation. tutorial/part1 tutorial/part2 tutorial/part3 + tutorial/part4 diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst new file mode 100644 index 00000000..bf9a711b --- /dev/null +++ b/docs/topics/tutorial/part4.rst @@ -0,0 +1,364 @@ +=========================== +Tutorial - Part 4 - Systems +=========================== + + +As we saw in :doc:`Part 3 `, we can use the library's +:class:`~eventsourcing.application.Application` class to define event-sourced +applications. In this part, we will create a second event-driven application which +pulls and processes event notifications from the notification log of the ``DogSchool`` +application. + +First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. + +.. 
code-block:: python + + from eventsourcing.application import Application + from eventsourcing.domain import Aggregate, event + + + class DogSchool(Application): + def register_dog(self, name): + dog = Dog(name) + self.save(dog) + return dog.id + + def add_trick(self, dog_id, trick): + dog = self.repository.get(dog_id) + dog.add_trick(trick=trick) + self.save(dog) + + def get_dog(self, dog_id): + dog = self.repository.get(dog_id) + return {'name': dog.name, 'tricks': tuple(dog.tricks)} + + + class Dog(Aggregate): + @event('Registered') + def __init__(self, name): + self.name = name + self.tricks = [] + + @event('TrickAdded') + def add_trick(self, trick): + self.tricks.append(trick) + + + +Process applications +==================== + +The most important thing that needs to be defined when processing events is a policy function. +The policy function defines how individual events will be processed. The policy function creates +changes to the state of the process application. A policy function has different responses for +different types of events. + +For example, we can define a policy that processes the ``Dog.TrickAdded`` events of the ``DogSchool`` +application, so that we can count the number of tricks that have been added. + +The ``Counters`` class below extends the ``ProcessApplication`` class by implementing a ``policy()`` +function. The event-sourced aggregate ``Counter`` has a method ``increment()`` which increments the +count for a particular trick. When a ``Dog.TrickAdded`` event is processed, the ``increment()`` method +is called. + +New events created in the policy function are collected by the ``process_event`` that is passed into +the ``policy()`` function along with the ``domain_event``. 
This ensures that all new events created +by a policy function in responding to an upstream domain event will be recorded atomically along with +a tracking record that indicates the position of the event in the upstream sequence of the domain event +that has been processed. + +.. code-block:: python + + from uuid import uuid5, NAMESPACE_URL + from eventsourcing.application import AggregateNotFound + from eventsourcing.system import ProcessApplication + from eventsourcing.dispatch import singledispatchmethod + + class Counters(ProcessApplication): + @singledispatchmethod + def policy(self, domain_event, process_event): + """Default policy""" + + @policy.register(Dog.TrickAdded) + def _(self, domain_event, process_event): + trick = domain_event.trick + try: + counter_id = Counter.create_id(trick) + counter = self.repository.get(counter_id) + except AggregateNotFound: + counter = Counter(trick) + counter.increment() + process_event.collect_events(counter) + + def get_count(self, trick): + counter_id = Counter.create_id(trick) + try: + counter = self.repository.get(counter_id) + except AggregateNotFound: + return 0 + return counter.count + + + class Counter(Aggregate): + def __init__(self, name): + self.name = name + self.count = 0 + + @classmethod + def create_id(cls, name): + return uuid5(NAMESPACE_URL, f'/counters/{name}') + + @event('Incremented') + def increment(self): + self.count += 1 + + +Defining an event-driven system +=============================== + +Rather than manually constructing the applications and pulling and processing events, we can use +the library's :class:`~eventsourcing.system.System` class to indicate which application is the +"leader" and which is the "follower". In this way, just like the persistence infrastructure that +each application will use can be defined when the applications are constructed, also the manner +in which the events will be pulled and processed can be defined when the system is run. + +.. 
code-block:: python
+
+    from eventsourcing.system import System
+
+    system = System(pipes=[[DogSchool, Counters]])
+
+
+Running an event-driven system
+===============================
+
+Just like it's possible to store events in different ways, it's possible to run an event-driven system
+in different ways. There are many possibilities for the orchestration of the applications in a system
+and for interprocess communication between the applications. One possibility is to use a single thread,
+and pull and process events sequentially. Another possibility is to use multiple threads in the same
+operating system process, with events processed concurrently and asynchronously. Another possibility is
+to use multiple operating system processes on the same machine, or alternatively on different machines
+in a network. Furthermore, when running a system with multiple operating system processes, there are
+many possible alternatives for inter-process communication by which events are transported from one
+application to another.
+
+The important thing, in all these cases, is to pull and process a sequence of events, and for new
+state in the downstream application to be recorded atomically along with a unique tracking record
+that indicates the position in the upstream sequence. And, when resuming the processing of events,
+to use the last recorded position in the downstream application to pull subsequent events from the
+upstream application. To demonstrate how this works, this library provides a
+:class:`~eventsourcing.system.SingleThreadedRunner` and a :class:`~eventsourcing.system.MultiThreadedRunner`.
+
+The :class:`~eventsourcing.system.SingleThreadedRunner` and the :class:`~eventsourcing.system.MultiThreadedRunner`
+implement the abstract :class:`~eventsourcing.system.Runner` class. These system runners are constructed
+with an instance of the :class:`~eventsourcing.system.System` class, and optionally an ``env`` dictionary.
+ +The runners have a :func:`~eventsourcing.system.Runner.start`` method which constructs and connects the +applications. The runners also have a :func:`~eventsourcing.system.Runner.get`` method, which returns an +application. When application command methods are called, new events will be propagated and processed, +according to the system definition and the application policies. Application query methods can be used +to obtain the resulting state of the system. + +The ``test()`` function below shows how the abstract runner interface can be used to operate the dog school +trick counting system. We will call the ``test()`` function firstly with the +:class:`~eventsourcing.system.SingleThreadedRunner` and then the :class:`~eventsourcing.system.MultiThreadedRunner`. +The applications will use the POPO persistence module by default. We will then run the system with the +library's SQLite persistence module, and then the PosgreSQL persistence module. + +.. code-block:: python + + from time import sleep + + def test(system, runner_class, wait=0, env=None): + + runner = runner_class(system, env=env) + runner.start() + + school = runner.get(DogSchool) + counters = runner.get(Counters) + + dog_id1 = school.register_dog('Billy') + dog_id2 = school.register_dog('Milly') + dog_id3 = school.register_dog('Scrappy') + + school.add_trick(dog_id1, 'roll over') + school.add_trick(dog_id2, 'roll over') + school.add_trick(dog_id3, 'roll over') + + sleep(wait) + + assert counters.get_count('roll over') == 3 + assert counters.get_count('fetch ball') == 0 + assert counters.get_count('play dead') == 0 + + school.add_trick(dog_id1, 'fetch ball') + school.add_trick(dog_id2, 'fetch ball') + + sleep(wait) + + assert counters.get_count('roll over') == 3 + assert counters.get_count('fetch ball') == 2 + assert counters.get_count('play dead') == 0 + + school.add_trick(dog_id1, 'play dead') + + sleep(wait) + + assert counters.get_count('roll over') == 3 + assert counters.get_count('fetch ball') == 
2 + assert counters.get_count('play dead') == 1 + + runner.stop() + + +Single-threaded runner +====================== + +We can run the system with the :class:`~eventsourcing.system.SingleThreadedRunner`. + +.. code-block:: python + + from eventsourcing.system import SingleThreadedRunner + + test(system, SingleThreadedRunner) + + +The applications will use the default POPO persistence module, because the environment variable +``PERSISTENCE_MODULE`` has not been set. + +Multi-threaded runner +===================== + +We can also run the system with the :class:`~eventsourcing.system.MultiThreadedRunner`. + +.. code-block:: python + + from eventsourcing.system import MultiThreadedRunner + + test(system, MultiThreadedRunner, wait=0.1) + + +SQLite environment +================== + +We can also run the system after configuring the applications to use the library's SQLite persistence module. +In the example below, the applications use an in-memory SQLite database. + +.. code-block:: python + + import os + + + # Use SQLite for persistence. + os.environ['PERSISTENCE_MODULE'] = 'eventsourcing.sqlite' + + # Use a separate in-memory database for each application. + os.environ['SQLITE_DBNAME'] = ':memory:' + + # Run the system tests. + test(system, SingleThreadedRunner) + +When running the system with the multi-threaded runner and SQLite databases, we need to be +careful to use separate databases for each application. We could use a file-based +database, but here we will use in-memory SQLite databases. Because we need SQLite's in-memory +databases to support multi-threading, we need to enable SQLite's shared cache. Because we +need to enable the shared cache, and we need more than one database in the same operating +system process, we also need to use named in-memory databases. In order to distinguish +environment variables for different applications in a system, the environment variable names +can be prefixed with the application name. + +.. 
code-block:: python + + # Use separate named in-memory databases in shared cache. + os.environ['DOGSCHOOL_SQLITE_DBNAME'] = 'file:dogschool?mode=memory&cache=shared' + os.environ['COUNTERS_SQLITE_DBNAME'] = 'file:counters?mode=memory&cache=shared' + + # Run the system tests. + test(system, MultiThreadedRunner, wait=0.2) + + +PostgreSQL Environment +====================== + +We can also run the system with the library's PostgreSQL persistence module. + +.. code-block:: python + + import os + + from eventsourcing.cipher import AESCipher + + # Generate a cipher key (keep this safe). + cipher_key = AESCipher.create_key(num_bytes=32) + + # Cipher key. + os.environ['CIPHER_KEY'] = cipher_key + # Cipher topic. + os.environ['CIPHER_TOPIC'] = 'eventsourcing.cipher:AESCipher' + # Compressor topic. + os.environ['COMPRESSOR_TOPIC'] = 'eventsourcing.compressor:ZlibCompressor' + + # Use Postgres database. + os.environ['PERSISTENCE_MODULE'] = 'eventsourcing.postgres' + + # Configure database connections. + os.environ['POSTGRES_DBNAME'] = 'eventsourcing' + os.environ['POSTGRES_HOST'] = '127.0.0.1' + os.environ['POSTGRES_PORT'] = '5432' + os.environ['POSTGRES_USER'] = 'eventsourcing' + os.environ['POSTGRES_PASSWORD'] = 'eventsourcing' + + test(system, SingleThreadedRunner) + +We can use the same PostgreSQL database for different applications in a system, +because the PostreSQL persistence module creates different tables for each application. + +However, before running the test again with PostgreSQL, we need to reset the trick counts, +because they are being stored in a durable database and so would simply accumulate. We can +do this by deleting the database tables for the system. + +.. 
code-block:: python + + from eventsourcing.postgres import PostgresDatastore + from eventsourcing.tests.postgres_utils import drop_postgres_table + + db = PostgresDatastore( + "eventsourcing", + "127.0.0.1", + "5432", + "eventsourcing", + "eventsourcing", + ) + drop_postgres_table(db, "dogschool_events") + drop_postgres_table(db, "counters_events") + drop_postgres_table(db, "counters_tracking") + +After resetting the trick counts, we can run the system again with the multi-threaded runner. + +.. code-block:: python + + test(system, MultiThreadedRunner, wait=0.2) + +Exercise +======== + +Firstly, replicate the code in this tutorial in your development environment. + +* Copy the code snippets above. +* Run the code with the default "plain old Python object" + persistence module. +* Configure and run the system with an SQLite database. +* Create a PostgreSQL database, and configure and run the + system with a PostgreSQL database. +* Connect to the databases with the command line clients for + SQLite and PostgreSQL, and examine the database tables to + observe the stored event records and the tracking records. + +Secondly, write an system that... + +Next steps +========== + +* For more information about event-driven systems, please read + :doc:`the system module documentation `. +* See also the :ref:`Example systems`. From 32c011acee74220d2111dc4b3a49f04999a70ac2 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:52:05 +0100 Subject: [PATCH 014/107] Improved tutorial "next steps" bullet points. --- docs/topics/tutorial/part1.rst | 6 ++---- docs/topics/tutorial/part2.rst | 7 ++++--- docs/topics/tutorial/part3.rst | 8 +++++--- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/docs/topics/tutorial/part1.rst b/docs/topics/tutorial/part1.rst index fb23c4ec..c55f33e4 100644 --- a/docs/topics/tutorial/part1.rst +++ b/docs/topics/tutorial/part1.rst @@ -430,7 +430,5 @@ classes. 
Next steps ========== -* For more information about event-sourced aggregates, please - read :doc:`Part 2 ` of this tutorial. -* For more information about event-sourced applications, please - read :doc:`Part 3 ` of this tutorial. +* To continue this tutorial, please read :doc:`Part 2 `. +* See also the :doc:`/topics/examples`. diff --git a/docs/topics/tutorial/part2.rst b/docs/topics/tutorial/part2.rst index 22e8e441..ddcfdb40 100644 --- a/docs/topics/tutorial/part2.rst +++ b/docs/topics/tutorial/part2.rst @@ -610,6 +610,7 @@ to be ``'ItemAdded'``. Copy the test below and make it pass. Next steps ========== -* For more information about event-sourced aggregates, please read the :doc:`the domain module documentation `. -* For more information about event-sourced applications, please read - :doc:`Part 3 ` of this tutorial. +* To continue this tutorial, please read :doc:`Part 3 `. +* For more information about event-sourced aggregates, please read the + :doc:`domain module documentation `. +* See also the :ref:`Example aggregates`. diff --git a/docs/topics/tutorial/part3.rst b/docs/topics/tutorial/part3.rst index e5c22e1d..0f3bf19c 100644 --- a/docs/topics/tutorial/part3.rst +++ b/docs/topics/tutorial/part3.rst @@ -613,7 +613,9 @@ stored event records in the database tables. Next steps ========== +* To continue this tutorial, please read :doc:`Part 4 `. * For more information about event-sourced applications, please read the - :doc:`the application module documentation `. -* For more information about storing and retrieving domain events, please read - the :doc:`persistence module documentation `. + :doc:`application module documentation `. +* For more information about storing and retrieving domain events, please read the + :doc:`persistence module documentation `. +* See also the :ref:`Example applications`. 
From dda4a11bde0040473dc43dba9d523552ec9ee8c7 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 12:55:04 +0100 Subject: [PATCH 015/107] Fixed test_docs.py (to check all docs again). --- eventsourcing/tests/docs_tests/test_docs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/eventsourcing/tests/docs_tests/test_docs.py b/eventsourcing/tests/docs_tests/test_docs.py index 8fdb23f4..87cd8539 100644 --- a/eventsourcing/tests/docs_tests/test_docs.py +++ b/eventsourcing/tests/docs_tests/test_docs.py @@ -102,7 +102,7 @@ def test_docs(self): for name in filenames: if name in skipped: continue - # if name.endswith(".rst"): + if name.endswith(".rst"): # if ( # name.endswith("persistence.rst") # or name.endswith("domain.rst") @@ -110,7 +110,7 @@ def test_docs(self): # or name.endswith("system.rst") # or name.endswith("examples.rst") # ): - if name.endswith('part4.rst'): + # if name.endswith('part4.rst'): # if name.endswith('aggregates_in_ddd.rst'): # if name.endswith('example_application.rst'): # if name.endswith('everything.rst'): From b63e837ec49e787d56db6bb4076dc5c176f7277c Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:10:09 +0100 Subject: [PATCH 016/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 106 +++++++++++++++++++++++---------- 1 file changed, 74 insertions(+), 32 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index bf9a711b..74db8bff 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -5,9 +5,10 @@ Tutorial - Part 4 - Systems As we saw in :doc:`Part 3 `, we can use the library's :class:`~eventsourcing.application.Application` class to define event-sourced -applications. In this part, we will create a second event-driven application which -pulls and processes event notifications from the notification log of the ``DogSchool`` -application. +applications. 
In this part, we will create two applications: the ``DogSchool`` +application that we discussed in Part 3, and a second application which can pull +and process the domain events of the ``DogSchool`` application from its notification +log. First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. @@ -48,24 +49,46 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. Process applications ==================== -The most important thing that needs to be defined when processing events is a policy function. -The policy function defines how individual events will be processed. The policy function creates -changes to the state of the process application. A policy function has different responses for -different types of events. - -For example, we can define a policy that processes the ``Dog.TrickAdded`` events of the ``DogSchool`` -application, so that we can count the number of tricks that have been added. - -The ``Counters`` class below extends the ``ProcessApplication`` class by implementing a ``policy()`` -function. The event-sourced aggregate ``Counter`` has a method ``increment()`` which increments the -count for a particular trick. When a ``Dog.TrickAdded`` event is processed, the ``increment()`` method -is called. - -New events created in the policy function are collected by the ``process_event`` that is passed into -the ``policy()`` function along with the ``domain_event``. This ensures that all new events created -by a policy function in responding to an upstream domain event will be recorded atomically along with -a tracking record that indicates the position of the event in the upstream sequence of the domain event -that has been processed. +Second, let's define an application which can pull and process the domain events +of the ``DogSchool`` application from its notification log. The ``Counters`` class +below extends the library's :class:`~eventsourcing.system.ProcessApplication` class. 
+ +The most important thing that needs to be defined when processing domain events is +a policy function. + +The policy function defines how individual domain events will be processed. A policy +function has different responses for different types of domain events. Each response +creates new changes to the state of the process application. These could be changes +to an event-sourced domain model, or they could be updates to a non-event sourced +materialized view. In this example, we will make changes to an event-sourced domain +model. + +In the example below, the ``Counters`` application counts the tricks added +in the ``Dog`` aggregates. It has a ``policy()`` function that processes the +``Dog.TrickAdded`` events of the ``DogSchool`` application. It makes changes to an +event-sourced domain model comprised of ``Counter`` aggregates. + +The ``Counter`` aggregate class has a ``name`` which will correspond to the name of a trick. +It also has a `count` attribute, which is an integer value with an initial value of `0`. It +also has an ``increment()`` method, decorated with the :func:`@event` +decorator, which increments the value of `count`. + +When a ``Dog.TrickAdded`` event is processed by the ``policy()`` function of the ``Counters`` application, +the name of the trick is used to get or create a ``Counter`` aggregate object. Then, the counter's +``increment()`` method is called once. The new domain events are then collected on a "processing event" +object before the policy function returns. + +The ``policy()`` function receives two arguments: ``domain_event`` and ``process_event``. The ``domain_event`` +argument is a domain event object that is to be processed. The ``process_event`` is an instance of the +:class:`~eventsourcing.application.ProcessingEvent` class. New domain events created in the +policy function are collected by calling the process event object's +:func:`~eventsourcing.application.ProcessingEvent.collect_events` method. 
+ +The purpose of the process event object is to hold all the new domain events created by the policy function, along +with a :class:`~eventsourcing.persistence.Tracking` object that indicates the position in the upstream sequence +of the domain event that is processed. These factors will be recorded together atomically by the process +application after the policy function returns. The tracking records are used to avoid dual writing in the +consumption and processing of domain events, so that each domain event is processed exactly once. .. code-block:: python @@ -116,11 +139,13 @@ that has been processed. Defining an event-driven system =============================== -Rather than manually constructing the applications and pulling and processing events, we can use -the library's :class:`~eventsourcing.system.System` class to indicate which application is the -"leader" and which is the "follower". In this way, just like the persistence infrastructure that -each application will use can be defined when the applications are constructed, also the manner -in which the events will be pulled and processed can be defined when the system is run. +Just like an application can be defined independently of concrete persistence infrastructure, we can +define which applications follow which other applications independently of the manner in which domain +events are pulled and processed. For this purpose, we can use the library's +:class:`~eventsourcing.system.System` class to specify a list of "pipes". + +In the example below, we define a system with one "pipe" that has the ``DogSchool`` application +followed by the ``Counters`` application. .. code-block:: python @@ -129,18 +154,35 @@ in which the events will be pulled and processed can be defined when the system system = System(pipes=[[DogSchool, Counters]]) +The system object builds a graph of the application classes, identifying "nodes" and "edges". + +.. 
code-block:: python + + assert list(system.nodes) == ["DogSchool", "Counters"], list(system.nodes) + assert system.edges == [("DogSchool", "Counters")], system.edges + + +When the system is run, the nodes will be instantiated as application objects, and the edges +will be used to set up the applications to "lead" and "follow" each other. Exactly how depends +upon the concrete implementation of a system runner. + + Runnning an event-driven system =============================== Just like it's possible to store events in different ways, it's possible to run an event-driven system in different ways. There are many possibilities for the orchestration of the applications in a system and for interprocess communication between the applications. One possibility is to use a single thread, -and pull and process events sequentially. Another possibility is to use multiple threads in the same -operating system process, with events processed concurrently and asynchronously. Another possibility is -to use multiple operating system processes on the same machine, or alternatively on different machines -in a network. Furthermore, when running a system with multiple operating system processes, there are -many possible alternatives for inter-process communication by which events are transported from one -application to another. +and to pull and process events synchronously and sequentially. Another possibility is to use multiple +threads in the same operating system process, with events processed concurrently and asynchronously. +If the application objects are all constructed in the same operating system process, the notification +logs can be used directly. + +Another possibility is to use multiple operating system processes on the same machine, or alternatively +on different machines in a network. When running a system with multiple operating system +processes, there notification logs must be accessed remotely across the operating system +process boundary. 
There are many possible alternatives for inter-process communication, +by which events are transported from one application to another. The important thing, in all these cases, is to pull and process a sequence of events, and for new state in the downstream application to be recorded atomically along with a unique tracking record From 1adb8a3ea6adbd0a445a3a31359fc4fb82fc9f25 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:15:56 +0100 Subject: [PATCH 017/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 74db8bff..934fb078 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -51,15 +51,16 @@ Process applications Second, let's define an application which can pull and process the domain events of the ``DogSchool`` application from its notification log. The ``Counters`` class -below extends the library's :class:`~eventsourcing.system.ProcessApplication` class. +below is an event processing application. It extends the library's +:class:`~eventsourcing.system.ProcessApplication` class. The most important thing that needs to be defined when processing domain events is a policy function. The policy function defines how individual domain events will be processed. A policy -function has different responses for different types of domain events. Each response -creates new changes to the state of the process application. These could be changes -to an event-sourced domain model, or they could be updates to a non-event sourced +function has different responses for different types of domain events. The policy function +may create changes to the state of the event processing application. These could be +changes to an event-sourced domain model, or they could be updates to a non-event sourced materialized view. 
In this example, we will make changes to an event-sourced domain model. @@ -71,7 +72,7 @@ event-sourced domain model comprised of ``Counter`` aggregates. The ``Counter`` aggregate class has a ``name`` which will correspond to the name of a trick. It also has a `count` attribute, which is an integer value with an initial value of `0`. It also has an ``increment()`` method, decorated with the :func:`@event` -decorator, which increments the value of `count`. +decorator, which increments the value of its `count` attribute. When a ``Dog.TrickAdded`` event is processed by the ``policy()`` function of the ``Counters`` application, the name of the trick is used to get or create a ``Counter`` aggregate object. Then, the counter's From 74556d60038bae110a591d2c792e4352623596f6 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:16:43 +0100 Subject: [PATCH 018/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 934fb078..7a53d666 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -70,7 +70,7 @@ in the ``Dog`` aggregates. It has a ``policy()`` function that processes the event-sourced domain model comprised of ``Counter`` aggregates. The ``Counter`` aggregate class has a ``name`` which will correspond to the name of a trick. -It also has a `count` attribute, which is an integer value with an initial value of `0`. It +It also has a `count` attribute, which is an integer value with an initial value of ``0``. It also has an ``increment()`` method, decorated with the :func:`@event` decorator, which increments the value of its `count` attribute. From 44c1aaf84b04bfc892ff7a6a09e8027dd4b5b930 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:21:27 +0100 Subject: [PATCH 019/107] Improved wording in tutorial part4.rst. 
--- docs/topics/tutorial/part4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 7a53d666..b2265c35 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -70,7 +70,7 @@ in the ``Dog`` aggregates. It has a ``policy()`` function that processes the event-sourced domain model comprised of ``Counter`` aggregates. The ``Counter`` aggregate class has a ``name`` which will correspond to the name of a trick. -It also has a `count` attribute, which is an integer value with an initial value of ``0``. It +It also has a ``count`` attribute, which is an integer value with an initial value of ``0``. It also has an ``increment()`` method, decorated with the :func:`@event` decorator, which increments the value of its `count` attribute. From 9b7dfcabca6fc7278b1e7ef5928e39bb94ee4440 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:21:51 +0100 Subject: [PATCH 020/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index b2265c35..2e155260 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -72,7 +72,7 @@ event-sourced domain model comprised of ``Counter`` aggregates. The ``Counter`` aggregate class has a ``name`` which will correspond to the name of a trick. It also has a ``count`` attribute, which is an integer value with an initial value of ``0``. It also has an ``increment()`` method, decorated with the :func:`@event` -decorator, which increments the value of its `count` attribute. +decorator, which increments the value of its ``count`` attribute. When a ``Dog.TrickAdded`` event is processed by the ``policy()`` function of the ``Counters`` application, the name of the trick is used to get or create a ``Counter`` aggregate object. 
Then, the counter's From 20a24c7e25bae882802271dbbe2da26a2b98606a Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:26:05 +0100 Subject: [PATCH 021/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 2e155260..dbd10937 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -141,7 +141,7 @@ Defining an event-driven system =============================== Just like an application can be defined independently of concrete persistence infrastructure, we can -define which applications follow which other applications independently of the manner in which domain +define which applications "follow" which other applications independently of the manner in which domain events are pulled and processed. For this purpose, we can use the library's :class:`~eventsourcing.system.System` class to specify a list of "pipes". @@ -181,7 +181,7 @@ logs can be used directly. Another possibility is to use multiple operating system processes on the same machine, or alternatively on different machines in a network. When running a system with multiple operating system -processes, there notification logs must be accessed remotely across the operating system +processes, their notification logs must be accessed remotely across the operating system process boundary. There are many possible alternatives for inter-process communication, by which events are transported from one application to another. From 7817939332139b52acedc4a0f8d421bcf59bb3ed Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:28:06 +0100 Subject: [PATCH 022/107] Improved wording in tutorial part4.rst. 
--- docs/topics/tutorial/part4.rst | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index dbd10937..9ab5b352 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -196,17 +196,19 @@ The :class:`~eventsourcing.system.SingleThreadedRunner` and a :class:`~eventsour implement the abstract :class:`~eventsourcing.system.Runner` class. These system runners are constructed with an instance of the :class:`~eventsourcing.system.System` class, and optionally an ``env`` dictionary. -The runners have a :func:`~eventsourcing.system.Runner.start`` method which constructs and connects the -applications. The runners also have a :func:`~eventsourcing.system.Runner.get`` method, which returns an +The runners have a :func:`~eventsourcing.system.Runner.start` method which constructs and connects the +applications. The runners also have a :func:`~eventsourcing.system.Runner.get` method, which returns an application. When application command methods are called, new events will be propagated and processed, according to the system definition and the application policies. Application query methods can be used to obtain the resulting state of the system. The ``test()`` function below shows how the abstract runner interface can be used to operate the dog school -trick counting system. We will call the ``test()`` function firstly with the -:class:`~eventsourcing.system.SingleThreadedRunner` and then the :class:`~eventsourcing.system.MultiThreadedRunner`. -The applications will use the POPO persistence module by default. We will then run the system with the -library's SQLite persistence module, and then the PosgreSQL persistence module. +trick counting system. + +We will run the ``test()`` function firstly with the :class:`~eventsourcing.system.SingleThreadedRunner` and +then the :class:`~eventsourcing.system.MultiThreadedRunner`. 
The applications will use the POPO persistence +module by default. We will then run the test again, with the library's SQLite persistence module, and then +with the PostgreSQL persistence module. .. code-block:: python From 262f98216fc608f10e965a13dca84a33940e091a Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:37:20 +0100 Subject: [PATCH 023/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 9ab5b352..4f04e0c0 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -216,12 +216,15 @@ with the PostgreSQL persistence module. def test(system, runner_class, wait=0, env=None): + # Start running the system. runner = runner_class(system, env=env) runner.start() + # Get the application objects. school = runner.get(DogSchool) counters = runner.get(Counters) + # Generate some events. dog_id1 = school.register_dog('Billy') dog_id2 = school.register_dog('Milly') dog_id3 = school.register_dog('Scrappy') @@ -230,29 +233,34 @@ with the PostgreSQL persistence module. school.add_trick(dog_id2, 'roll over') school.add_trick(dog_id3, 'roll over') + # Wait in case events are processed asynchronously. sleep(wait) + # Check the results of processing the events. assert counters.get_count('roll over') == 3 assert counters.get_count('fetch ball') == 0 assert counters.get_count('play dead') == 0 + # Generate more events. school.add_trick(dog_id1, 'fetch ball') school.add_trick(dog_id2, 'fetch ball') + # Check the results. sleep(wait) - assert counters.get_count('roll over') == 3 assert counters.get_count('fetch ball') == 2 assert counters.get_count('play dead') == 0 + # Generate more events. school.add_trick(dog_id1, 'play dead') + # Check the results. 
sleep(wait) - assert counters.get_count('roll over') == 3 assert counters.get_count('fetch ball') == 2 assert counters.get_count('play dead') == 1 + # Stop the runner. runner.stop() @@ -274,7 +282,8 @@ The applications will use the default POPO persistence module, because the envir Multi-threaded runner ===================== -We can also run the system with the :class:`~eventsourcing.system.MultiThreadedRunner`. +We can also run the system with the :class:`~eventsourcing.system.MultiThreadedRunner`. Because +the events are processed asynchronously, we need to ``wait`` for the results. .. code-block:: python @@ -282,12 +291,15 @@ We can also run the system with the :class:`~eventsourcing.system.MultiThreadedR test(system, MultiThreadedRunner, wait=0.1) +Again, the applications will use the default POPO persistence module, because the environment variable +``PERSISTENCE_MODULE`` has not been set. + SQLite environment ================== -We can also run the system after configuring the applications to use the library's SQLite persistence module. -In the example below, the applications use an in-memory SQLite database. +We can also run the system of applications with the library's SQLite persistence module. +In the example below, the applications use in-memory SQLite databases. .. code-block:: python @@ -308,9 +320,10 @@ careful to use separate databases for each application. We could use a file-base database, but here we will use in-memory SQLite databases. Because we need SQLite's in-memory databases to support multi-threading, we need to enable SQLite's shared cache. Because we need to enable the shared cache, and we need more than one database in the same operating -system process, we also need to use named in-memory databases. In order to distinguish -environment variables for different applications in a system, the environment variable names -can be prefixed with the application name. +system process, we also need to use named in-memory databases. 
The SQLite URI pattern +`''file:{NAME}?mode=memory&cache=shared'` specifies a named in-memory database that has shared cache. +In order to distinguish environment variables for different applications in a system, the environment +variable names should be prefixed with the application name. .. code-block:: python From 4348dd2fc05d691f9b38948a625ac34ea86de52a Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:40:26 +0100 Subject: [PATCH 024/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 4f04e0c0..ab15d1c3 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -86,8 +86,8 @@ policy function are collected by calling the process event object's :func:`~eventsourcing.application.ProcessingEvent.collect_events` method. The purpose of the process event object is to hold all the new domain events created by the policy function, along -with a :class:`~eventsourcing.persistence.Tracking` object that indicates the position in the upstream sequence -of the domain event that is processed. These factors will be recorded together atomically by the process +with a :class:`~eventsourcing.persistence.Tracking` object that indicates a position in an application sequence +of the domain event that is being processed. These factors will be recorded together atomically by the process application after the policy function returns. The tracking records are used to avoid dual writing in the consumption and processing of domain events, so that each domain event is processed exactly once. From 4317f7fbadd47b8b6e3b69c0172b41a1a693b795 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 14:49:31 +0100 Subject: [PATCH 025/107] Improved wording in tutorial part4.rst. 
--- docs/topics/tutorial/part4.rst | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index ab15d1c3..c8375b7b 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -192,8 +192,8 @@ to use the last recorded position in the downstream application to pull subseque upstream application. To demonstrate how this works, this library provides a :class:`~eventsourcing.system.SingleThreadedRunner` and a :class:`~eventsourcing.system.MultiThreadedRunner`. -The :class:`~eventsourcing.system.SingleThreadedRunner` and a :class:`~eventsourcing.system.MultiThreadedRunner` -implement the abstract :class:`~eventsourcing.system.Runner` class. These system runners are constructed +The :class:`~eventsourcing.system.SingleThreadedRunner` and :class:`~eventsourcing.system.MultiThreadedRunner` +classes implement the abstract :class:`~eventsourcing.system.Runner` class. These system runners are constructed with an instance of the :class:`~eventsourcing.system.System` class, and optionally an ``env`` dictionary. The runners have a :func:`~eventsourcing.system.Runner.start` method which constructs and connects the @@ -275,6 +275,8 @@ We can run the system with the :class:`~eventsourcing.system.SingleThreadedRunne test(system, SingleThreadedRunner) +When the events are processed synchronously, we do not need to ``wait`` for the results, +because the events will have been processed before the application command returns. The applications will use the default POPO persistence module, because the environment variable ``PERSISTENCE_MODULE`` has not been set. @@ -315,6 +317,9 @@ In the example below, the applications use in-memory SQLite databases. # Run the system tests. 
test(system, SingleThreadedRunner) +When the events are processed synchronously, we do not need to ``wait`` for the results, +because the events will have been processed before the application command returns. + When running the system with the multi-threaded runner and SQLite databases, we need to be careful to use separate databases for each application. We could use a file-based database, but here we will use in-memory SQLite databases. Because we need SQLite's in-memory @@ -335,10 +340,13 @@ variable names should be prefixed with the application name. test(system, MultiThreadedRunner, wait=0.2) +When the events are processed asynchronously, we need to ``wait`` for the results. + PostgreSQL Environment ====================== -We can also run the system with the library's PostgreSQL persistence module. +We can also run the system with the library's PostgreSQL persistence module. Just for fun, +we will also configure the system to compress and encrypt the domain events. .. code-block:: python @@ -368,8 +376,9 @@ We can also run the system with the library's PostgreSQL persistence module. test(system, SingleThreadedRunner) -We can use the same PostgreSQL database for different applications in a system, -because the PostreSQL persistence module creates different tables for each application. + +Although we must use different SQLite databases for different applications, we can use the same PostgreSQL +database, because the PostgreSQL persistence module creates separate tables for each application. However, before running the test again with PostgreSQL, we need to reset the trick counts, because they are being stored in a durable database and so would simply accumulate. We can do this by deleting the database tables for the system. @@ -391,12 +400,13 @@ do this by deleting the database tables for the system. drop_postgres_table(db, "dogschool_events") drop_postgres_table(db, "counters_events") drop_postgres_table(db, "counters_tracking") -After resetting the trick counts, we can run the system again with the multi-threaded runner.
+After resetting the recorded state of the system, we can run the system again with the multi-threaded runner. .. code-block:: python test(system, MultiThreadedRunner, wait=0.2) + Exercise ======== From 3cff4665ab54168bd54c856fa1f33058f22e2825 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:02:20 +0100 Subject: [PATCH 026/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 63 +++++++++++++++++++++------------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index c8375b7b..5ea1b5b0 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -14,6 +14,8 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. .. code-block:: python + from uuid import uuid5, NAMESPACE_URL + from eventsourcing.application import Application from eventsourcing.domain import Aggregate, event @@ -24,13 +26,13 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. self.save(dog) return dog.id - def add_trick(self, dog_id, trick): - dog = self.repository.get(dog_id) + def add_trick(self, name, trick): + dog = self.repository.get(Dog.create_id(name)) dog.add_trick(trick=trick) self.save(dog) - def get_dog(self, dog_id): - dog = self.repository.get(dog_id) + def get_dog(self, name): + dog = self.repository.get(Dog.create_id(name)) return {'name': dog.name, 'tricks': tuple(dog.tricks)} @@ -40,6 +42,10 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. self.name = name self.tricks = [] + @classmethod + def create_id(cls, name): + return uuid5(NAMESPACE_URL, f'/dogs/{name}') + @event('TrickAdded') def add_trick(self, trick): self.tricks.append(trick) @@ -93,11 +99,11 @@ consumption and processing of domain events, so that each domain event is proces .. 
code-block:: python - from uuid import uuid5, NAMESPACE_URL from eventsourcing.application import AggregateNotFound from eventsourcing.system import ProcessApplication from eventsourcing.dispatch import singledispatchmethod + class Counters(ProcessApplication): @singledispatchmethod def policy(self, domain_event, process_event): @@ -159,8 +165,8 @@ The system object builds a graph of the application classes, identifying "nodes" .. code-block:: python - assert list(system.nodes) == ["DogSchool", "Counters"], list(system.nodes) - assert system.edges == [("DogSchool", "Counters")], system.edges + assert list(system.nodes) == ['DogSchool', 'Counters'], list(system.nodes) + assert system.edges == [('DogSchool', 'Counters')], system.edges When the system is run, the nodes will be instantiated as application objects, and the edges @@ -225,13 +231,13 @@ with the PostgreSQL persistence module. counters = runner.get(Counters) # Generate some events. - dog_id1 = school.register_dog('Billy') - dog_id2 = school.register_dog('Milly') - dog_id3 = school.register_dog('Scrappy') + school.register_dog('Billy') + school.register_dog('Milly') + school.register_dog('Scrappy') - school.add_trick(dog_id1, 'roll over') - school.add_trick(dog_id2, 'roll over') - school.add_trick(dog_id3, 'roll over') + school.add_trick('Billy', 'roll over') + school.add_trick('Milly', 'roll over') + school.add_trick('Scrappy', 'roll over') # Wait in case events are processed asynchronously. sleep(wait) @@ -242,8 +248,8 @@ with the PostgreSQL persistence module. assert counters.get_count('play dead') == 0 # Generate more events. - school.add_trick(dog_id1, 'fetch ball') - school.add_trick(dog_id2, 'fetch ball') + school.add_trick('Billy', 'fetch ball') + school.add_trick('Milly', 'fetch ball') # Check the results. sleep(wait) @@ -252,7 +258,7 @@ with the PostgreSQL persistence module. assert counters.get_count('play dead') == 0 # Generate more events. 
- school.add_trick(dog_id1, 'play dead') + school.add_trick('Billy', 'play dead') # Check the results. sleep(wait) @@ -390,15 +396,15 @@ do this by deleting the database tables for the system. from eventsourcing.tests.postgres_utils import drop_postgres_table db = PostgresDatastore( - "eventsourcing", - "127.0.0.1", - "5432", - "eventsourcing", - "eventsourcing", + 'eventsourcing', + '127.0.0.1', + '5432', + 'eventsourcing', + 'eventsourcing', ) - drop_postgres_table(db, "dogschool_events") - drop_postgres_table(db, "counters_events") - drop_postgres_table(db, "counters_tracking") + drop_postgres_table(db, 'dogschool_events') + drop_postgres_table(db, 'counters_events') + drop_postgres_table(db, 'counters_tracking') After resetting the recorded state of the system, we can run the system again with the multi-threaded runner. @@ -407,6 +413,15 @@ After resetting the recorded state of the system, we can run the system again wi test(system, MultiThreadedRunner, wait=0.2) +When the state of the system is recorded in a durable database, we can access the +state of the system's applications by directly constructing the application objects. + +.. code-block:: python + + assert DogSchool().get_dog('Scrappy')['tricks'] == ('roll over',) + assert Counters().get_count('roll over') == 3 + + Exercise ======== From 233e358f382756b0d96ec6021f465178945812c1 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:04:03 +0100 Subject: [PATCH 027/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 5ea1b5b0..eabae6e1 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -332,7 +332,7 @@ database, but here we will use in-memory SQLite databases. Because we need SQLit databases to support multi-threading, we need to enable SQLite's shared cache. 
Because we need to enable the shared cache, and we need more than one database in the same operating system process, we also need to use named in-memory databases. The SQLite URI pattern -`''file:{NAME}?mode=memory&cache=shared'` specifies a named in-memory database that has shared cache. +``'file:{NAME}?mode=memory&cache=shared'`` specifies a named in-memory database that has shared cache. In order to distinguish environment variables for different applications in a system, the environment variable names should be prefixed with the application name. From 90e782823892504f4ba3301338a0e73e78657311 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:04:46 +0100 Subject: [PATCH 028/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index eabae6e1..5a5dac09 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -332,7 +332,7 @@ database, but here we will use in-memory SQLite databases. Because we need SQLit databases to support multi-threading, we need to enable SQLite's shared cache. Because we need to enable the shared cache, and we need more than one database in the same operating system process, we also need to use named in-memory databases. The SQLite URI pattern -``'file:{NAME}?mode=memory&cache=shared'`` specifies a named in-memory database that has shared cache. +``'file:{NAME}?mode=memory&cache=shared'`` specifies a named in-memory database that has a shared cache. In order to distinguish environment variables for different applications in a system, the environment variable names should be prefixed with the application name. From 954fc34167510120a2bdfc3e6ae51ba2fd3faa94 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:29:55 +0100 Subject: [PATCH 029/107] Improved wording in tutorial part4.rst. 
--- docs/topics/tutorial/part4.rst | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 5a5dac09..08c03a4f 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -24,7 +24,6 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. def register_dog(self, name): dog = Dog(name) self.save(dog) - return dog.id def add_trick(self, name, trick): dog = self.repository.get(Dog.create_id(name)) @@ -348,7 +347,8 @@ variable names should be prefixed with the application name. When the events are processed asynchronously, we need to ``wait`` for the results. -PostgreSQL Environment + +PostgreSQL environment ====================== We can also run the system with the library's PostgreSQL persistence module. Just for fun, @@ -422,10 +422,10 @@ state of the system's applications by directly constructing the application obje assert Counters().get_count('roll over') == 3 -Exercise -======== +Exercises +========= -Firstly, replicate the code in this tutorial in your development environment. +1. Replicate the code in this tutorial in your development environment. * Copy the code snippets above. * Run the code with the default "plain old Python object" @@ -433,11 +433,21 @@ Firstly, replicate the code in this tutorial in your development environment. * Configure and run the system with an SQLite database. * Create a PostgreSQL database, and configure and run the system with a PostgreSQL database. +* Configure the system to run the ``DogSchool`` application + with a PostgreSQL database and the ``Counters`` application + with a file-based SQLite database. * Connect to the databases with the command line clients for SQLite and PostgreSQL, and examine the database tables to observe the stored event records and the tracking records. -Secondly, write an system that... +2. 
Write a system that has a ``Game`` application with ``Player`` aggregates that +have a ``score`` which can be updated, that is followed by a `HallOfFame` application that +processes the score update events into an event-sourced `HighScoreTable` aggregate that keeps +a list of the top three scores. + +3. Write a system that processes events with "exactly once semantics" into +a non-event sourced materialized view. + Next steps ========== From 60f2b227671c2223e2ae79c31d6512c6b4f752da Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:30:21 +0100 Subject: [PATCH 030/107] Adjusted tutorial exercises. --- docs/topics/tutorial/part1.rst | 8 ++++---- docs/topics/tutorial/part2.rst | 8 +++++--- docs/topics/tutorial/part3.rst | 8 ++++---- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/docs/topics/tutorial/part1.rst b/docs/topics/tutorial/part1.rst index c55f33e4..81ea36d1 100644 --- a/docs/topics/tutorial/part1.rst +++ b/docs/topics/tutorial/part1.rst @@ -402,13 +402,13 @@ projects ` Exercise ======== -Completing this exercise depends on: +Completing the exercises in this tutorial depends on: * having a working Python installation, * :doc:`installing the eventsourcing library `, and * knowing how to `write and run tests in Python `_. -Copy the ``test_dog_school()`` function (see above) into a Python file, for example +1. Copy the ``test_dog_school()`` function (see above) into a Python file, for example ``test_application.py``. Then run the test function and see that it fails. Then add the ``DogSchool`` application and the ``Dog`` aggregate code. Then run the test function again and see that it passes. .. code-block:: python test_dog_school() -When your code is working, refactor by moving the application and +2. When your code is working, refactor by moving the application and aggregate classes to separate Python files, for example ``application.py`` and ``domainmodel.py``.
After completing your refactorings, run the test again to make sure your code still works. -If you are feeling playful, you can use a debugger or add some print +3. If you are feeling playful, you can use a debugger or add some print statements to step through what happens in the aggregate and application classes. diff --git a/docs/topics/tutorial/part2.rst b/docs/topics/tutorial/part2.rst index ddcfdb40..42fd4b89 100644 --- a/docs/topics/tutorial/part2.rst +++ b/docs/topics/tutorial/part2.rst @@ -533,10 +533,12 @@ event is triggered when the "private" method ``_trick_added()`` is called by the assert copy == dog -Exercise -======== +Exercises +========= -Define a ``Todos`` aggregate, that has a given ``name`` and a list of ``items``. +1. Replicate the code in this tutorial in your development environment. + +2. Define a ``Todos`` aggregate, that has a given ``name`` and a list of ``items``. Define a method ``add_item()`` that adds a new item to the list. Specify the name of the "created" event to be ``'Started'`` and the name of the subsequent event to be ``'ItemAdded'``. Copy the test below and make it pass. diff --git a/docs/topics/tutorial/part3.rst b/docs/topics/tutorial/part3.rst index 0f3bf19c..9624afe2 100644 --- a/docs/topics/tutorial/part3.rst +++ b/docs/topics/tutorial/part3.rst @@ -588,10 +588,10 @@ In this example, stored events are both compressed and encrypted. In consequence we can expect the recorded values not to be visible in the database records. -Exercise -======== +Exercises +========= -Firstly, replicate the code in this tutorial in your development environment. +1. Replicate the code in this tutorial in your development environment. * Copy the code snippets above. * Run the application code with the default "plain old Python object" @@ -603,7 +603,7 @@ Firstly, replicate the code in this tutorial in your development environment. SQLite and PostgreSQL, and examine the database tables to observe the stored event records. 
-Secondly, write an application class that uses the ``Todos`` aggregate +2. Write an application class that uses the ``Todos`` aggregate class you created in the exercise at the end of :doc:`Part 2 `. Run your application class with default "plain old Python object" persistence module, and then with an SQLite database, and finally with a PostgreSQL database. Look at the From 4f3bfe5206fded445a548930d6f71d049697b30d Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:32:31 +0100 Subject: [PATCH 031/107] Adjusted tutorial exercises. --- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 08c03a4f..c5f14b14 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -441,8 +441,8 @@ Exercises observe the stored event records and the tracking records. 2. Write a system that has a ``Game`` application with ``Player`` aggregates that -have a ``score`` which can be updated, that is followed by a `HallOfFame` application that -processes the score update events into an event-sourced `HighScoreTable` aggregate that keeps +have a ``score`` which can be updated, that is followed by a ``HallOfFame`` application that +processes the score update events into an event-sourced ``HighScoreTable`` aggregate that keeps a list of the top three scores. 3. Write a system that processes events with "exactly once semantics" into From c583c8c2355b1889f6089b7dd20667a71b0f3090 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:37:28 +0100 Subject: [PATCH 032/107] Fixed docstring (removed extra word). 
--- eventsourcing/system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eventsourcing/system.py b/eventsourcing/system.py index 01a22ad4..b8c6bbb6 100644 --- a/eventsourcing/system.py +++ b/eventsourcing/system.py @@ -495,7 +495,7 @@ def receive_recording_event(self, recording_event: RecordingEvent) -> None: to a list of prompted names. Then, unless this method has previously been called and not yet returned, - each the of the prompted names is resolved to a leader application, and its + each of the prompted names is resolved to a leader application, and its followers pull and process events from that application. This may lead to further names being added to the list of prompted names. This process continues until there are no more prompted names. In this way, a system From 1a4f097d8dbae2fb32a3c1c69f44e0fedb9b5b45 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 15:55:35 +0100 Subject: [PATCH 033/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index c5f14b14..607c5a64 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -5,10 +5,12 @@ Tutorial - Part 4 - Systems As we saw in :doc:`Part 3 `, we can use the library's :class:`~eventsourcing.application.Application` class to define event-sourced -applications. In this part, we will create two applications: the ``DogSchool`` -application that we discussed in Part 3, and a second application which can pull -and process the domain events of the ``DogSchool`` application from its notification -log. +applications. Now let's explore event-driven systems. + +We will create two applications: the ``DogSchool`` application that we discussed in Part 3, +and a second application which processes the domain events of the ``DogSchool`` +application. 
We will compose these two applications into a "system" and run the system +with a "runner", with different combinations of runners and persistence modules. First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. From 0c0b81aacefe8f8ed9b620f84e0c7747e0d3c107 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 16:04:05 +0100 Subject: [PATCH 034/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 607c5a64..a5e59544 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -175,8 +175,8 @@ will be used to set up the applications to "lead" and "follow" each other. Exact upon the concrete implementation of a system runner. -Runnning an event-driven system -=============================== +Running an event-driven system +============================== Just like it's possible to store events in different ways, it's possible to run an event-driven system in different ways. There are many possibilities for the orchestration of the applications in a system From a9cd18e73ae88551e47a95253a6a660db09132d7 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 16:22:48 +0100 Subject: [PATCH 035/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index a5e59544..9bafb683 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -56,10 +56,9 @@ First, let's define the ``DogSchool`` application and the ``Dog`` aggregate. Process applications ==================== -Second, let's define an application which can pull and process the domain events -of the ``DogSchool`` application from its notification log. 
The ``Counters`` class -below is an event processing application. It extends the library's -:class:`~eventsourcing.system.ProcessApplication` class. +Second, let's define an application which counts tricks added in the ``Dog`` aggregates, by +processing the domain events of the ``DogSchool`` application. The ``Counters`` class below is +an event processing application. It extends the library's :class:`~eventsourcing.system.ProcessApplication` class. The most important thing that needs to be defined when processing domain events is a policy function. From 03198b5a1243c83d464576b08c5493c9ae369f85 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 19:30:31 +0100 Subject: [PATCH 036/107] Improved wording in tutorial part4.rst. --- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 9bafb683..5f4a3117 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -165,8 +165,8 @@ The system object builds a graph of the application classes, identifying "nodes" .. code-block:: python - assert list(system.nodes) == ['DogSchool', 'Counters'], list(system.nodes) - assert system.edges == [('DogSchool', 'Counters')], system.edges + assert list(system.nodes) == ['DogSchool', 'Counters'] + assert system.edges == [('DogSchool', 'Counters')] When the system is run, the nodes will be instantiated as application objects, and the edges From 3e8ca6f782fcdca9d2a57bb12daa715dbd3dac03 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Wed, 18 Oct 2023 20:01:40 +0100 Subject: [PATCH 037/107] Improved wording in tutorial part4.rst. 
--- docs/topics/tutorial/part4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part4.rst b/docs/topics/tutorial/part4.rst index 5f4a3117..d9994b9d 100644 --- a/docs/topics/tutorial/part4.rst +++ b/docs/topics/tutorial/part4.rst @@ -182,12 +182,12 @@ in different ways. There are many possibilities for the orchestration of the app and for interprocess communication between the applications. One possibility is to use a single thread, and to pull and process events synchronously and sequentially. Another possibility is to use multiple threads in the same operating system process, with events processed concurrently and asynchronously. -If the application objects are all constructed in the same operating system process, the notification +If the application objects are all constructed in the same operating system process, their notification logs can be used directly. Another possibility is to use multiple operating system processes on the same machine, or alternatively on different machines in a network. When running a system with multiple operating system -processes, their notification logs must be accessed remotely across the operating system +processes, the notification logs must be accessed remotely across the operating system process boundary. There are many possible alternatives for inter-process communication, by which events are transported from one application to another. From c210506714d7d0b9ac9ee08fcefe09ad2a6f9e6e Mon Sep 17 00:00:00 2001 From: johnbywater Date: Fri, 27 Oct 2023 02:40:33 +0100 Subject: [PATCH 038/107] Fixed docstrings. 
--- eventsourcing/persistence.py | 2 +- eventsourcing/postgres.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/eventsourcing/persistence.py b/eventsourcing/persistence.py index efebf877..2b63fa63 100644 --- a/eventsourcing/persistence.py +++ b/eventsourcing/persistence.py @@ -1106,7 +1106,7 @@ def put_connection(self, conn: TConnection) -> None: Returns connections to the pool, or closes connection if the pool is full. - Unlocks write lock after writer has returned., and + Unlocks write lock after writer has returned, and updates count of readers when readers are returned. Notifies waiters when connections have been returned, diff --git a/eventsourcing/postgres.py b/eventsourcing/postgres.py index d6f6ce29..55fcfc80 100644 --- a/eventsourcing/postgres.py +++ b/eventsourcing/postgres.py @@ -422,7 +422,7 @@ def _insert_events( # notification_id values in order, and by locking the table for writes, # it can be guaranteed. The EXCLUSIVE lock mode does not block # the ACCESS SHARE lock which is acquired during SELECT statements, - # so the table can be read concurrently. However INSERT normally + # so the table can be read concurrently. However, INSERT normally # just acquires ROW EXCLUSIVE locks, which risks interleaving of # many inserts in one transaction with many insert in another # transaction. Since one transaction will commit before another, From 1e202f97d9b1b74c7b676dd4ed7bf7ade9103a4d Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 20 Jan 2024 13:54:39 +0000 Subject: [PATCH 039/107] Resolved mypy issue (issue from new type inference in 1.7.0). Also adjusted exception value string. 
--- eventsourcing/application.py | 19 +++++++++++-------- .../aggregate7/test_snapshotting_intervals.py | 4 +--- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/eventsourcing/application.py b/eventsourcing/application.py index 0163db22..509e275f 100644 --- a/eventsourcing/application.py +++ b/eventsourcing/application.py @@ -848,9 +848,6 @@ def _take_snapshots(self, processing_event: ProcessingEvent) -> None: interval = self.snapshotting_intervals.get(type(aggregate)) if interval is not None: if event.originator_version % interval == 0: - projector_func: ProjectorFunction[ - MutableOrImmutableAggregate, DomainEventProtocol - ] if ( self.snapshotting_projectors and type(aggregate) in self.snapshotting_projectors @@ -860,14 +857,20 @@ def _take_snapshots(self, processing_event: ProcessingEvent) -> None: ] else: projector_func = project_aggregate - if ( - not isinstance(event, CanMutateProtocol) - and projector_func is project_aggregate + if projector_func is project_aggregate and not isinstance( + event, CanMutateProtocol ): raise ProgrammingError( ( - "Aggregate projector function not found. Please set " - "snapshotting_projectors on application class." + f"Cannot take snapshot for {type(aggregate)} with " + "default project_aggregate() function, because its " + f"domain event {type(event)} does not implement " + "the 'can mutate' protocol (see CanMutateProtocol)." + f" Please define application class {type(self)}" + " with class variable 'snapshotting_projectors', " + f"to be a dict that has {type(aggregate)} as a key " + "with the aggregate projector function for " + f"{type(aggregate)} as the value for that key." 
) ) self.take_snapshot( diff --git a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py index 599b1717..f5ba4ea5 100644 --- a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +++ b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py @@ -38,9 +38,7 @@ def test_dog_school(self) -> None: with self.assertRaises(ProgrammingError) as cm: school.register_dog("Fido") - self.assertIn( - "set snapshotting_projectors on application class", cm.exception.args[0] - ) + self.assertIn("Cannot take snapshot", cm.exception.args[0]) # Set snapshotting_projectors. SubDogSchool.snapshotting_projectors = {Dog: project_dog} From b421de32fdc36bdb4f77a2a7ae6a651f0a9ba323 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 20 Jan 2024 14:21:33 +0000 Subject: [PATCH 040/107] Resolved mypy issue (issue from new type inference in 1.7.0). Also adjusted exception value string. --- .github/workflows/runtests.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/runtests.yaml b/.github/workflows/runtests.yaml index 070ac14c..220f4270 100644 --- a/.github/workflows/runtests.yaml +++ b/.github/workflows/runtests.yaml @@ -50,6 +50,7 @@ jobs: - name: Lint run: make lint + if: matrix.python_version != '3.7' - name: Test run: make test From 4b3c9affb8a19c8ba1d1a1f7c0be19e6cd607a73 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 20 Jan 2024 14:23:48 +0000 Subject: [PATCH 041/107] Resolved mypy issue (issue from new type inference in 1.7.0). Also adjusted exception value string. 
--- .github/workflows/runtests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/runtests.yaml b/.github/workflows/runtests.yaml index 220f4270..c974d2f7 100644 --- a/.github/workflows/runtests.yaml +++ b/.github/workflows/runtests.yaml @@ -50,7 +50,7 @@ jobs: - name: Lint run: make lint - if: matrix.python_version != '3.7' + if: matrix.python-version != '3.7' - name: Test run: make test From f54df85477698c004c73e7050ea5c32c7e02b1fd Mon Sep 17 00:00:00 2001 From: Nurzhan Zhanuzak <88082838+nurzhanzhanuzak@users.noreply.github.com> Date: Sat, 20 Jan 2024 14:39:56 +0600 Subject: [PATCH 042/107] Update part2.rst Fix typo: trick_added -> add_trick --- docs/topics/tutorial/part2.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/topics/tutorial/part2.rst b/docs/topics/tutorial/part2.rst index 42fd4b89..714cbd47 100644 --- a/docs/topics/tutorial/part2.rst +++ b/docs/topics/tutorial/part2.rst @@ -496,9 +496,9 @@ command method ``add_trick()`` that calls a decorated "private" method ``_add_tr self.tricks.append(trick) -Because the "public" command method ``trick_added()`` is not decorated with the +Because the "public" command method ``add_trick()`` is not decorated with the :func:`@event` decorator, it does not trigger an event when it is called. Instead, the -event is triggered when the "private" method ``_trick_added()`` is called by the +event is triggered when the "private" method ``_add_trick()`` is called by the "public" method. .. 
From d8cc9c26e6db9641ecc96548c18aa7c39c5fbdd0 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:15:11 +0200 Subject: [PATCH 043/107] from __future__ import annotations --- eventsourcing/examples/aggregate1/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate1/application.py b/eventsourcing/examples/aggregate1/application.py index 3e8777d3..29df684f 100644 --- a/eventsourcing/examples/aggregate1/application.py +++ b/eventsourcing/examples/aggregate1/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From 867dd4c1dbb26cddbc23defb70ec651aeff7e89e Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:15:29 +0200 Subject: [PATCH 044/107] Update domainmodel.py --- eventsourcing/examples/aggregate1/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate1/domainmodel.py b/eventsourcing/examples/aggregate1/domainmodel.py index fe880c54..a8bfa760 100644 --- a/eventsourcing/examples/aggregate1/domainmodel.py +++ b/eventsourcing/examples/aggregate1/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List from eventsourcing.domain import Aggregate, event From 4a9146c4b11064ac59ef1cab65c7c843a81fd6d4 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:15:54 +0200 Subject: [PATCH 045/107] from __future__ import annotations --- eventsourcing/examples/aggregate1/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate1/test_application.py b/eventsourcing/examples/aggregate1/test_application.py index 77031dba..d80477a4 100644 --- a/eventsourcing/examples/aggregate1/test_application.py +++ 
b/eventsourcing/examples/aggregate1/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate1.application import DogSchool From bf7a0a97ae63548ed8d08ec01da0591871f52a9a Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:16:37 +0200 Subject: [PATCH 046/107] from __future__ import annotations --- eventsourcing/examples/aggregate2/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate2/application.py b/eventsourcing/examples/aggregate2/application.py index 4630d537..e5337124 100644 --- a/eventsourcing/examples/aggregate2/application.py +++ b/eventsourcing/examples/aggregate2/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From b912b700b8036c98d12a8b345797d653f5e9e6c8 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:16:51 +0200 Subject: [PATCH 047/107] Update domainmodel.py --- eventsourcing/examples/aggregate2/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate2/domainmodel.py b/eventsourcing/examples/aggregate2/domainmodel.py index f9f3dbf4..e802f2b9 100644 --- a/eventsourcing/examples/aggregate2/domainmodel.py +++ b/eventsourcing/examples/aggregate2/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List from eventsourcing.domain import Aggregate, event From 6bb162d40c5e5c21d95ece4c63932e17baefa989 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:17:01 +0200 Subject: [PATCH 048/107] Update test_application.py --- eventsourcing/examples/aggregate2/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate2/test_application.py 
b/eventsourcing/examples/aggregate2/test_application.py index 0ecf03fc..88f8bf7e 100644 --- a/eventsourcing/examples/aggregate2/test_application.py +++ b/eventsourcing/examples/aggregate2/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate2.application import DogSchool From b9350452194e08012d7a5022438aa567ec162926 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:17:21 +0200 Subject: [PATCH 049/107] from __future__ import annotations --- eventsourcing/examples/aggregate3/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate3/application.py b/eventsourcing/examples/aggregate3/application.py index 23012ca8..94755b3b 100644 --- a/eventsourcing/examples/aggregate3/application.py +++ b/eventsourcing/examples/aggregate3/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From 90fd584f7cee0aaa1c1077b211af3f3e30186f30 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:17:35 +0200 Subject: [PATCH 050/107] from __future__ import annotations --- eventsourcing/examples/aggregate3/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate3/domainmodel.py b/eventsourcing/examples/aggregate3/domainmodel.py index 58c5e13a..c72912cf 100644 --- a/eventsourcing/examples/aggregate3/domainmodel.py +++ b/eventsourcing/examples/aggregate3/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List, cast from eventsourcing.dispatch import singledispatchmethod From 2d31c6f3cb29be9e4d6770d8c8e4d86c27f9b0c2 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:17:44 +0200 Subject: [PATCH 051/107] Update 
test_application.py --- eventsourcing/examples/aggregate3/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate3/test_application.py b/eventsourcing/examples/aggregate3/test_application.py index 80908bb3..f98d98ba 100644 --- a/eventsourcing/examples/aggregate3/test_application.py +++ b/eventsourcing/examples/aggregate3/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate3.application import DogSchool From 1eea43a7482b211a4451d6077640e736430164f4 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:17:54 +0200 Subject: [PATCH 052/107] Update application.py --- eventsourcing/examples/aggregate4/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate4/application.py b/eventsourcing/examples/aggregate4/application.py index cd43b3fe..55c890ec 100644 --- a/eventsourcing/examples/aggregate4/application.py +++ b/eventsourcing/examples/aggregate4/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From 3aad9051800aa06b5a3788cfb77cc6e131a96ab3 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:18:39 +0200 Subject: [PATCH 053/107] Update domainmodel.py --- eventsourcing/examples/aggregate4/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate4/domainmodel.py b/eventsourcing/examples/aggregate4/domainmodel.py index 99af73a4..706fe1e1 100644 --- a/eventsourcing/examples/aggregate4/domainmodel.py +++ b/eventsourcing/examples/aggregate4/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections import defaultdict from dataclasses import dataclass from datetime import datetime, timezone From 
177fba5dc1a29a9cba680ddbff795bb6c91b2448 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:18:52 +0200 Subject: [PATCH 054/107] from __future__ import annotations --- eventsourcing/examples/aggregate4/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate4/test_application.py b/eventsourcing/examples/aggregate4/test_application.py index 354972e8..30e6c584 100644 --- a/eventsourcing/examples/aggregate4/test_application.py +++ b/eventsourcing/examples/aggregate4/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate4.application import DogSchool From b5b4ddcdcf7ee85d3822c03e251735f068bcaf26 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:19:04 +0200 Subject: [PATCH 055/107] from __future__ import annotations --- eventsourcing/examples/aggregate5/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate5/application.py b/eventsourcing/examples/aggregate5/application.py index 82102520..23e06656 100644 --- a/eventsourcing/examples/aggregate5/application.py +++ b/eventsourcing/examples/aggregate5/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From af2421b0ff4782120f75e7cab1941b61cf421685 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:19:13 +0200 Subject: [PATCH 056/107] Update domainmodel.py --- eventsourcing/examples/aggregate5/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate5/domainmodel.py b/eventsourcing/examples/aggregate5/domainmodel.py index ebe075f4..489d094f 100644 --- a/eventsourcing/examples/aggregate5/domainmodel.py +++ 
b/eventsourcing/examples/aggregate5/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import dataclass from datetime import datetime, timezone from typing import Any, Iterable, Optional, Tuple, Type, TypeVar From ced21fb991caf098d272ebd4bd7e4b7ea7f9f32f Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:19:25 +0200 Subject: [PATCH 057/107] Update test_application.py --- eventsourcing/examples/aggregate5/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate5/test_application.py b/eventsourcing/examples/aggregate5/test_application.py index 6b99e17f..13534e76 100644 --- a/eventsourcing/examples/aggregate5/test_application.py +++ b/eventsourcing/examples/aggregate5/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate5.application import DogSchool From 1018bd6c464606f1deadeac78232a18336c4ed28 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:19:39 +0200 Subject: [PATCH 058/107] from __future__ import annotations --- eventsourcing/examples/aggregate6/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate6/application.py b/eventsourcing/examples/aggregate6/application.py index a419d97f..d64fa4c1 100644 --- a/eventsourcing/examples/aggregate6/application.py +++ b/eventsourcing/examples/aggregate6/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From 0485bfcbcf593aa9a728938cbcd9ff704bfbc5ce Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:19:49 +0200 Subject: [PATCH 059/107] from __future__ import annotations --- eventsourcing/examples/aggregate6/domainmodel.py | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate6/domainmodel.py b/eventsourcing/examples/aggregate6/domainmodel.py index ca5e114e..ef0898fb 100644 --- a/eventsourcing/examples/aggregate6/domainmodel.py +++ b/eventsourcing/examples/aggregate6/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import dataclass from datetime import datetime, timezone from functools import singledispatch From 0bd22b92e7d85aa51fe1ed70a4a3f277686f24a0 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:01 +0200 Subject: [PATCH 060/107] from __future__ import annotations --- eventsourcing/examples/aggregate6/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate6/test_application.py b/eventsourcing/examples/aggregate6/test_application.py index e71bf81a..be655bd5 100644 --- a/eventsourcing/examples/aggregate6/test_application.py +++ b/eventsourcing/examples/aggregate6/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate6.application import DogSchool From 1bacf7a4629053b92fc68bfd92f0d177cf289b65 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:15 +0200 Subject: [PATCH 061/107] Update application.py --- eventsourcing/examples/aggregate7/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate7/application.py b/eventsourcing/examples/aggregate7/application.py index e454ba52..97abfcec 100644 --- a/eventsourcing/examples/aggregate7/application.py +++ b/eventsourcing/examples/aggregate7/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From d23007a78087d151706ea28e99f84948d5e3e415 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov 
<154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:27 +0200 Subject: [PATCH 062/107] Update persistence.py --- eventsourcing/examples/aggregate7/persistence.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate7/persistence.py b/eventsourcing/examples/aggregate7/persistence.py index e328730f..1d44a56f 100644 --- a/eventsourcing/examples/aggregate7/persistence.py +++ b/eventsourcing/examples/aggregate7/persistence.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict, cast import orjson From f46addf4c433d41c79b5818cabc788cf30e370cb Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:35 +0200 Subject: [PATCH 063/107] Update test_application.py --- eventsourcing/examples/aggregate7/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate7/test_application.py b/eventsourcing/examples/aggregate7/test_application.py index 9aa502f2..46fb7b31 100644 --- a/eventsourcing/examples/aggregate7/test_application.py +++ b/eventsourcing/examples/aggregate7/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Tuple from unittest import TestCase From 42fc20670bb598929527ba2ee7b79d2723513712 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:45 +0200 Subject: [PATCH 064/107] Update test_compression_and_encryption.py --- .../examples/aggregate7/test_compression_and_encryption.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate7/test_compression_and_encryption.py b/eventsourcing/examples/aggregate7/test_compression_and_encryption.py index 1abffe17..02427fc7 100644 --- a/eventsourcing/examples/aggregate7/test_compression_and_encryption.py +++ b/eventsourcing/examples/aggregate7/test_compression_and_encryption.py @@ -1,3 +1,5 @@ 
+from __future__ import annotations + from typing import Tuple from unittest import TestCase From dc669faa5b3790f210c1a857e12f940e36062c66 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:20:53 +0200 Subject: [PATCH 065/107] Update test_snapshotting_intervals.py --- .../examples/aggregate7/test_snapshotting_intervals.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py index f5ba4ea5..ad135887 100644 --- a/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py +++ b/eventsourcing/examples/aggregate7/test_snapshotting_intervals.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Tuple, cast from unittest import TestCase from uuid import UUID From 4f91d5f04a0fb3cebbe4ec3b5bbe75cc1aed93ef Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:04 +0200 Subject: [PATCH 066/107] Update application.py --- eventsourcing/examples/aggregate8/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/application.py b/eventsourcing/examples/aggregate8/application.py index 102c6277..3a868497 100644 --- a/eventsourcing/examples/aggregate8/application.py +++ b/eventsourcing/examples/aggregate8/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict from uuid import UUID From 609f0fb228a0a83cbb51662657a996196b01bb70 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:12 +0200 Subject: [PATCH 067/107] Update domainmodel.py --- eventsourcing/examples/aggregate8/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/domainmodel.py 
b/eventsourcing/examples/aggregate8/domainmodel.py index 0f62e7d8..24037fba 100644 --- a/eventsourcing/examples/aggregate8/domainmodel.py +++ b/eventsourcing/examples/aggregate8/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from typing import List from uuid import UUID From 54501032851593d6129a7414742e59bcf5b5cda8 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:19 +0200 Subject: [PATCH 068/107] Update persistence.py --- eventsourcing/examples/aggregate8/persistence.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/persistence.py b/eventsourcing/examples/aggregate8/persistence.py index e328730f..1d44a56f 100644 --- a/eventsourcing/examples/aggregate8/persistence.py +++ b/eventsourcing/examples/aggregate8/persistence.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict, cast import orjson From 551bc4be6436ae7fdb4e3d8bf64615bf8d991a40 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:27 +0200 Subject: [PATCH 069/107] Update test_application.py --- eventsourcing/examples/aggregate8/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/test_application.py b/eventsourcing/examples/aggregate8/test_application.py index 0ef76f9e..c06b5522 100644 --- a/eventsourcing/examples/aggregate8/test_application.py +++ b/eventsourcing/examples/aggregate8/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Tuple from unittest import TestCase From d69682dea627e8ae3334203244b552e3d09576c7 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:35 +0200 Subject: [PATCH 070/107] Update test_compression_and_encryption.py --- 
.../examples/aggregate8/test_compression_and_encryption.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/test_compression_and_encryption.py b/eventsourcing/examples/aggregate8/test_compression_and_encryption.py index 454f0407..821829c6 100644 --- a/eventsourcing/examples/aggregate8/test_compression_and_encryption.py +++ b/eventsourcing/examples/aggregate8/test_compression_and_encryption.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.cipher import AESCipher From 4816baa726b6af84db382de6dcfcf7ef9d8f1b95 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:42 +0200 Subject: [PATCH 071/107] Update test_snapshotting_intervals.py --- .../examples/aggregate8/test_snapshotting_intervals.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py b/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py index 078bbd46..75053bbb 100644 --- a/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py +++ b/eventsourcing/examples/aggregate8/test_snapshotting_intervals.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from unittest import TestCase from eventsourcing.examples.aggregate8.application import DogSchool From 1fa3c88b44f48f90836b6ea13a52c1dee9f6a77a Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:21:53 +0200 Subject: [PATCH 072/107] Update application.py --- eventsourcing/examples/bankaccounts/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/bankaccounts/application.py b/eventsourcing/examples/bankaccounts/application.py index 6998c406..e6aaf7bc 100644 --- a/eventsourcing/examples/bankaccounts/application.py +++ b/eventsourcing/examples/bankaccounts/application.py @@ -1,3 +1,5 @@ +from __future__ import 
annotations + from decimal import Decimal from uuid import UUID From b629d530326c3a2f650accd9d4e4cb797b7d8132 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:00 +0200 Subject: [PATCH 073/107] Update domainmodel.py --- eventsourcing/examples/bankaccounts/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/bankaccounts/domainmodel.py b/eventsourcing/examples/bankaccounts/domainmodel.py index a92f2f04..f2267541 100644 --- a/eventsourcing/examples/bankaccounts/domainmodel.py +++ b/eventsourcing/examples/bankaccounts/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from decimal import Decimal from eventsourcing.domain import Aggregate, event From 3cb1314e7f2180640d3a8cfdea7ac8a2974cc176 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:08 +0200 Subject: [PATCH 074/107] Update test.py --- eventsourcing/examples/bankaccounts/test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/bankaccounts/test.py b/eventsourcing/examples/bankaccounts/test.py index 5e209a1a..a89e5a14 100644 --- a/eventsourcing/examples/bankaccounts/test.py +++ b/eventsourcing/examples/bankaccounts/test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import unittest from decimal import Decimal from uuid import uuid4 From fe847f155d5f5888b7cd384bb5ae24141ea71b4a Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:22 +0200 Subject: [PATCH 075/107] Update application.py --- eventsourcing/examples/cargoshipping/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/cargoshipping/application.py b/eventsourcing/examples/cargoshipping/application.py index 85100f11..12793ba6 100644 --- a/eventsourcing/examples/cargoshipping/application.py +++ 
b/eventsourcing/examples/cargoshipping/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from typing import Any, Dict, List, Optional, cast from uuid import UUID From aa2afc5152a170151b7c6cfb49940826a392ffef Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:37 +0200 Subject: [PATCH 076/107] Update domainmodel.py --- eventsourcing/examples/cargoshipping/domainmodel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/cargoshipping/domainmodel.py b/eventsourcing/examples/cargoshipping/domainmodel.py index 24d5ea8c..fd92df01 100644 --- a/eventsourcing/examples/cargoshipping/domainmodel.py +++ b/eventsourcing/examples/cargoshipping/domainmodel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime, timedelta from enum import Enum from typing import Dict, List, Optional, Tuple, Union, cast From aaca874b368207a58e75400ad92b694a03d23af7 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:52 +0200 Subject: [PATCH 077/107] Update interface.py --- eventsourcing/examples/cargoshipping/interface.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/cargoshipping/interface.py b/eventsourcing/examples/cargoshipping/interface.py index 86c6cd67..324f6cc1 100644 --- a/eventsourcing/examples/cargoshipping/interface.py +++ b/eventsourcing/examples/cargoshipping/interface.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from typing import Dict, List, Optional, Tuple, Union from uuid import UUID From f0d07d8c98e35dc04dde3f81fc21deecc7349ce8 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:22:59 +0200 Subject: [PATCH 078/107] Update test.py --- eventsourcing/examples/cargoshipping/test.py | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/eventsourcing/examples/cargoshipping/test.py b/eventsourcing/examples/cargoshipping/test.py index d7ea43be..a833aae9 100644 --- a/eventsourcing/examples/cargoshipping/test.py +++ b/eventsourcing/examples/cargoshipping/test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import unittest from datetime import timedelta From 4a5caf67b5eb007f7abfb5746836d98ebc4682d3 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:23:09 +0200 Subject: [PATCH 079/107] from __future__ import annotations --- eventsourcing/examples/contentmanagement/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagement/application.py b/eventsourcing/examples/contentmanagement/application.py index 2fb9ae98..5e9d5bc5 100644 --- a/eventsourcing/examples/contentmanagement/application.py +++ b/eventsourcing/examples/contentmanagement/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict, Iterator, Optional, Union, cast from uuid import NAMESPACE_URL, UUID, uuid5 From 64ce4e168710beb86c63f04c3e5be0a4fa6b9d3e Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:23:23 +0200 Subject: [PATCH 080/107] Update test.py --- eventsourcing/examples/contentmanagement/test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagement/test.py b/eventsourcing/examples/contentmanagement/test.py index 9836d6c9..15e86cb3 100644 --- a/eventsourcing/examples/contentmanagement/test.py +++ b/eventsourcing/examples/contentmanagement/test.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import cast from unittest import TestCase from uuid import uuid4 From a27c0b79fa4cb16b723bdf65eb458f841110576e Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 
20 Jan 2024 16:23:42 +0200 Subject: [PATCH 081/107] Update utils.py --- eventsourcing/examples/contentmanagement/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagement/utils.py b/eventsourcing/examples/contentmanagement/utils.py index 976c828b..39c5db0b 100644 --- a/eventsourcing/examples/contentmanagement/utils.py +++ b/eventsourcing/examples/contentmanagement/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from tempfile import TemporaryDirectory From 079d48a5c25e84a60daf9c7bf8b67d1ade2c639d Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:23:52 +0200 Subject: [PATCH 082/107] Update application.py --- eventsourcing/examples/contentmanagementsystem/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagementsystem/application.py b/eventsourcing/examples/contentmanagementsystem/application.py index 7598079d..ece94573 100644 --- a/eventsourcing/examples/contentmanagementsystem/application.py +++ b/eventsourcing/examples/contentmanagementsystem/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List, cast from uuid import UUID From ba9b19e22c6d9f3b50b5787c94d5f22fa4370af4 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:24:15 +0200 Subject: [PATCH 083/107] Update system.py --- eventsourcing/examples/contentmanagementsystem/system.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagementsystem/system.py b/eventsourcing/examples/contentmanagementsystem/system.py index b4b61ef9..b4078328 100644 --- a/eventsourcing/examples/contentmanagementsystem/system.py +++ b/eventsourcing/examples/contentmanagementsystem/system.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from eventsourcing.examples.contentmanagement.application import ( 
ContentManagementApplication, ) From 4caa1f79b901420bc37c70f4e444d24597389e08 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:24:24 +0200 Subject: [PATCH 084/107] Update test_system.py --- eventsourcing/examples/contentmanagementsystem/test_system.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/contentmanagementsystem/test_system.py b/eventsourcing/examples/contentmanagementsystem/test_system.py index 404e1416..11b4c9cd 100644 --- a/eventsourcing/examples/contentmanagementsystem/test_system.py +++ b/eventsourcing/examples/contentmanagementsystem/test_system.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict from unittest import TestCase from uuid import uuid4 From 33ea19ccee30b351ff2ef71d192d9c5b94c73c94 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:24:35 +0200 Subject: [PATCH 085/107] Update application.py --- eventsourcing/examples/searchablecontent/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/application.py b/eventsourcing/examples/searchablecontent/application.py index bce0d624..80842393 100644 --- a/eventsourcing/examples/searchablecontent/application.py +++ b/eventsourcing/examples/searchablecontent/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, List, Optional, Tuple, Union, cast from uuid import UUID From 2b1cbdd7e79d4da21cad05829e84573d917cf35e Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:24:43 +0200 Subject: [PATCH 086/107] Update persistence.py --- eventsourcing/examples/searchablecontent/persistence.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/persistence.py b/eventsourcing/examples/searchablecontent/persistence.py 
index ade65273..cedff666 100644 --- a/eventsourcing/examples/searchablecontent/persistence.py +++ b/eventsourcing/examples/searchablecontent/persistence.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import abstractmethod from typing import List, Tuple from uuid import UUID From dc7c0fe7dd441e4f18fddb2b9e2b8c2ecb987733 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:24:55 +0200 Subject: [PATCH 087/107] Update postgres.py --- eventsourcing/examples/searchablecontent/postgres.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/postgres.py b/eventsourcing/examples/searchablecontent/postgres.py index 5a742fc9..ffcc3900 100644 --- a/eventsourcing/examples/searchablecontent/postgres.py +++ b/eventsourcing/examples/searchablecontent/postgres.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, List, Optional, Sequence, Tuple from uuid import UUID From f9f85ce6a4a9f8b38a2a98401b12316529de5f5b Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:25:07 +0200 Subject: [PATCH 088/107] Update sqlite.py --- eventsourcing/examples/searchablecontent/sqlite.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/sqlite.py b/eventsourcing/examples/searchablecontent/sqlite.py index 68c1947f..4d3181b0 100644 --- a/eventsourcing/examples/searchablecontent/sqlite.py +++ b/eventsourcing/examples/searchablecontent/sqlite.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, List, Optional, Sequence, Tuple from uuid import UUID From f0c5ad4feb8d876b6313739b04226052e379d208 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:25:19 +0200 Subject: [PATCH 089/107] Update test_application.py --- 
eventsourcing/examples/searchablecontent/test_application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/test_application.py b/eventsourcing/examples/searchablecontent/test_application.py index 95680c54..38808156 100644 --- a/eventsourcing/examples/searchablecontent/test_application.py +++ b/eventsourcing/examples/searchablecontent/test_application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from typing import Dict from unittest import TestCase From 8d8aa9adf4b02a454450aafcedcb893bfad12408 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:25:30 +0200 Subject: [PATCH 090/107] Update test_recorder.py --- eventsourcing/examples/searchablecontent/test_recorder.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/searchablecontent/test_recorder.py b/eventsourcing/examples/searchablecontent/test_recorder.py index fef3f378..27b1f7cb 100644 --- a/eventsourcing/examples/searchablecontent/test_recorder.py +++ b/eventsourcing/examples/searchablecontent/test_recorder.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from typing import Dict, cast from unittest import TestCase From 1a51d9a0c2021bc05eef5d6e251564683b8a1223 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:25:50 +0200 Subject: [PATCH 091/107] Update test_invoice.py --- eventsourcing/examples/test_invoice.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/test_invoice.py b/eventsourcing/examples/test_invoice.py index 26f961a4..72692ed5 100644 --- a/eventsourcing/examples/test_invoice.py +++ b/eventsourcing/examples/test_invoice.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from dataclasses import dataclass from datetime import datetime from decimal import Decimal From cf002728ecedc6d3678796e132c1aeee827515a7 Mon Sep 17 
00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:26:03 +0200 Subject: [PATCH 092/107] Update test_parking_lot.py --- eventsourcing/examples/test_parking_lot.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/examples/test_parking_lot.py b/eventsourcing/examples/test_parking_lot.py index 422a6a18..c041b309 100644 --- a/eventsourcing/examples/test_parking_lot.py +++ b/eventsourcing/examples/test_parking_lot.py @@ -2,6 +2,8 @@ After Ed Blackburn's https://github.com/edblackburn/parking-lot/. """ +from __future__ import annotations + import re from dataclasses import dataclass from datetime import datetime, timedelta From a681b2102f2efd378d1d428b4d76f60238e9fcdc Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:26:12 +0200 Subject: [PATCH 093/107] Update application.py --- eventsourcing/application.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/application.py b/eventsourcing/application.py index 509e275f..09089ce1 100644 --- a/eventsourcing/application.py +++ b/eventsourcing/application.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from abc import ABC, abstractmethod from copy import deepcopy From 2d15304e59196939eb7f23e76aa168d4e0a3e667 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:26:32 +0200 Subject: [PATCH 094/107] Update cipher.py --- eventsourcing/cipher.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/cipher.py b/eventsourcing/cipher.py index 63abf0d4..68853e5a 100644 --- a/eventsourcing/cipher.py +++ b/eventsourcing/cipher.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from base64 import b64decode, b64encode From cc19825775867013e3b94740182f78a75649c3f5 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: 
Sat, 20 Jan 2024 16:26:42 +0200 Subject: [PATCH 095/107] Update compressor.py --- eventsourcing/compressor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/compressor.py b/eventsourcing/compressor.py index 142b53e3..c4d2f892 100644 --- a/eventsourcing/compressor.py +++ b/eventsourcing/compressor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import zlib from eventsourcing.persistence import Compressor From 0afe26f4cdc6b7fd22e7ecc6d8f5af215af1e1b4 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:27:25 +0200 Subject: [PATCH 096/107] Update interface.py --- eventsourcing/interface.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/interface.py b/eventsourcing/interface.py index 3fd4fa4c..77c3289c 100644 --- a/eventsourcing/interface.py +++ b/eventsourcing/interface.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from abc import ABC, abstractmethod from base64 import b64decode, b64encode From dc6588a3c1ed68645cba4bd42da1f0e807979897 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:27:36 +0200 Subject: [PATCH 097/107] Update persistence.py --- eventsourcing/persistence.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/persistence.py b/eventsourcing/persistence.py index 2b63fa63..1561ae7e 100644 --- a/eventsourcing/persistence.py +++ b/eventsourcing/persistence.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import uuid from abc import ABC, abstractmethod From 98031e3ba1028c0655709e67c549eb8d085f08be Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:27:45 +0200 Subject: [PATCH 098/107] Update popo.py --- eventsourcing/popo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/popo.py b/eventsourcing/popo.py index 7e80bdd4..3246047d 
100644 --- a/eventsourcing/popo.py +++ b/eventsourcing/popo.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections import defaultdict from threading import Lock from typing import Any, Dict, Iterable, List, Optional, Sequence, Set From 59927760168eb9572d92b4cc07cdf430ebd10cf4 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:27:52 +0200 Subject: [PATCH 099/107] Update postgres.py --- eventsourcing/postgres.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/postgres.py b/eventsourcing/postgres.py index 55fcfc80..d5965914 100644 --- a/eventsourcing/postgres.py +++ b/eventsourcing/postgres.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import contextmanager from itertools import chain from threading import Lock From 3a2c750533ea04949f98c39890fadd9c3db83121 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:28:03 +0200 Subject: [PATCH 100/107] Update sqlite.py --- eventsourcing/sqlite.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/sqlite.py b/eventsourcing/sqlite.py index 75781f2d..7ff5b74a 100644 --- a/eventsourcing/sqlite.py +++ b/eventsourcing/sqlite.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sqlite3 from contextlib import contextmanager from sqlite3 import Connection, Cursor From cfb81d48f0476b7dc5dafb5b6f7054a37c045b41 Mon Sep 17 00:00:00 2001 From: Mikhail Mitusov <154631181+MikeMitusov@users.noreply.github.com> Date: Sat, 20 Jan 2024 16:28:12 +0200 Subject: [PATCH 101/107] Update utils.py --- eventsourcing/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eventsourcing/utils.py b/eventsourcing/utils.py index 41230036..aacddb22 100644 --- a/eventsourcing/utils.py +++ b/eventsourcing/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import sys from functools import wraps From 
ee665490eff10cc2ba3a37f1eb931a891e63ac1c Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 20 Jan 2024 19:45:24 +0000 Subject: [PATCH 102/107] Fixed singledispatch for future annotations. --- docs/topics/examples/aggregate5.rst | 11 +++--- eventsourcing/dispatch.py | 38 ++++++++++++++++++- .../examples/aggregate3/domainmodel.py | 2 - .../examples/aggregate4/domainmodel.py | 6 +-- .../examples/aggregate5/application.py | 6 +-- .../examples/aggregate5/domainmodel.py | 20 ++++++---- .../examples/cargoshipping/domainmodel.py | 6 +-- 7 files changed, 62 insertions(+), 27 deletions(-) diff --git a/docs/topics/examples/aggregate5.rst b/docs/topics/examples/aggregate5.rst index 58f1bb7f..bca6d7ae 100644 --- a/docs/topics/examples/aggregate5.rst +++ b/docs/topics/examples/aggregate5.rst @@ -11,19 +11,18 @@ Like in the previous example, this example also does *not* use the library's ``Aggregate`` and ``DomainEvent`` base classes. In contrast to the previous examples, the aggregate is defined as a frozen data class so that it is an immutable object. This has implications for the aggregate command methods, which must -simply return the events that they trigger without mutating the state of the -aggregate object. +return the events that they trigger. -The ``Dog`` aggregate is an immutable frozen data class, but it otherwise similar +The ``Dog`` aggregate is an immutable frozen data class, but it is otherwise similar to the previous example. It explicitly defines event classes. And it explicitly triggers events in command methods. However, it has a ``mutate()`` method which evolves aggregate state by constructing a new instance of the aggregate class for each event. The application code in this example must receive the new events that -are triggered when calling the aggregate command methods. The aggregate -projector function must also be supplied when getting an aggregate from the -repository and when taking snapshots. 
+are triggered when calling the aggregate command methods, and pass them +to the ``save()`` method. The aggregate projector function must also be +supplied when getting an aggregate from the repository and when taking snapshots. Domain model ------------ diff --git a/eventsourcing/dispatch.py b/eventsourcing/dispatch.py index 5311151c..06be055c 100644 --- a/eventsourcing/dispatch.py +++ b/eventsourcing/dispatch.py @@ -6,6 +6,10 @@ from functools import singledispatchmethod as _singledispatchmethod class singledispatchmethod(_singledispatchmethod): + def __init__(self, func): + super().__init__(func) + self.deferred_registrations = [] + def register(self, cls, method=None): """generic_method.register(cls, func) -> func @@ -17,7 +21,23 @@ def register(self, cls, method=None): first_annotation[k] = v break cls.__annotations__ = first_annotation - return self.dispatcher.register(cls, func=method) + + # for globals in typing.get_type_hints() in Python 3.8 and 3.9 + if not hasattr(cls, "__wrapped__"): + cls.__wrapped__ = cls.__func__ + + try: + return self.dispatcher.register(cls, func=method) + except NameError: + self.deferred_registrations.append([cls, method]) + # Todo: Fix this.... 
+ return method or cls + + def __get__(self, obj, cls=None): + for registered_cls, registered_method in self.deferred_registrations: + self.dispatcher.register(registered_cls, func=registered_method) + self.deferred_registrations = [] + return super().__get__(obj, cls=cls) else: from functools import singledispatch, update_wrapper @@ -35,6 +55,7 @@ def __init__(self, func): self.dispatcher = singledispatch(func) self.func = func + self.deferred_registrations = [] def register(self, cls, method=None): """generic_method.register(cls, func) -> func @@ -47,9 +68,22 @@ def register(self, cls, method=None): first_annotation[k] = v break cls.__annotations__ = first_annotation - return self.dispatcher.register(cls, func=method) + cls.__wrapped__ = cls.__func__ # for globals in typing.get_type_hints() + try: + return self.dispatcher.register(cls, func=method) + except NameError as e: + self.deferred_registrations.append([cls, method, e]) + # Todo: Fix this.... + return method or cls def __get__(self, obj, cls=None): + for cls, method, original_e in self.deferred_registrations: + try: + self.dispatcher.register(cls, func=method) + except NameError as e: + raise original_e from e + self.deferred_registrations = [] + def _method(*args, **kwargs): method = self.dispatcher.dispatch(args[0].__class__) return method.__get__(obj, cls)(*args, **kwargs) diff --git a/eventsourcing/examples/aggregate3/domainmodel.py b/eventsourcing/examples/aggregate3/domainmodel.py index c72912cf..58c5e13a 100644 --- a/eventsourcing/examples/aggregate3/domainmodel.py +++ b/eventsourcing/examples/aggregate3/domainmodel.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import List, cast from eventsourcing.dispatch import singledispatchmethod diff --git a/eventsourcing/examples/aggregate4/domainmodel.py b/eventsourcing/examples/aggregate4/domainmodel.py index 706fe1e1..58d16905 100644 --- a/eventsourcing/examples/aggregate4/domainmodel.py +++ 
b/eventsourcing/examples/aggregate4/domainmodel.py @@ -113,17 +113,17 @@ def add_trick(self, trick: str) -> None: def apply(self, event: DomainEvent) -> None: """Applies event to aggregate.""" - @apply.register + @apply.register(Registered) def _(self, event: Registered) -> None: super().__init__(event) self.name = event.name self.tricks: List[str] = [] - @apply.register + @apply.register(TrickAdded) def _(self, event: TrickAdded) -> None: self.tricks.append(event.trick) self.version = event.originator_version - @apply.register + @apply.register(Snapshot) def _(self, event: Snapshot) -> None: self.__dict__.update(event.state) diff --git a/eventsourcing/examples/aggregate5/application.py b/eventsourcing/examples/aggregate5/application.py index 23e06656..9ed02993 100644 --- a/eventsourcing/examples/aggregate5/application.py +++ b/eventsourcing/examples/aggregate5/application.py @@ -11,13 +11,13 @@ class DogSchool(Application): is_snapshotting_enabled = True def register_dog(self, name: str) -> UUID: - event = Dog.register(name) + dog, event = Dog.register(name) self.save(event) - return event.originator_id + return dog.id def add_trick(self, dog_id: UUID, trick: str) -> None: dog = self.repository.get(dog_id, projector_func=Dog.projector) - event = dog.add_trick(trick) + dog, event = dog.add_trick(trick) self.save(event) def get_dog(self, dog_id: UUID) -> Dict[str, Any]: diff --git a/eventsourcing/examples/aggregate5/domainmodel.py b/eventsourcing/examples/aggregate5/domainmodel.py index 489d094f..e9074007 100644 --- a/eventsourcing/examples/aggregate5/domainmodel.py +++ b/eventsourcing/examples/aggregate5/domainmodel.py @@ -73,25 +73,29 @@ class TrickAdded(DomainEvent): trick: str @staticmethod - def register(name: str) -> DomainEvent: - return Dog.Registered( + def register(name: str) -> Tuple[Dog, DomainEvent]: + event = Dog.Registered( originator_id=uuid4(), originator_version=1, timestamp=DomainEvent.create_timestamp(), name=name, ) + dog = Dog.mutate(event, 
None) + return dog, event - def add_trick(self, trick: str) -> DomainEvent: - return self.trigger_event(Dog.TrickAdded, trick=trick) + def add_trick(self, trick: str) -> Tuple[Dog, DomainEvent]: + event = self.trigger_event(Dog.TrickAdded, trick=trick) + dog = Dog.mutate(event, self) + return dog, event @singledispatchmethod @classmethod - def mutate(cls, event: DomainEvent, aggregate: Optional["Dog"]) -> Optional["Dog"]: + def mutate(cls, event: DomainEvent, aggregate: Optional[Dog]) -> Optional[Dog]: """Mutates aggregate with event.""" @mutate.register @classmethod - def _(cls, event: Registered, _: Optional["Dog"]) -> "Dog": + def _(cls, event: Dog.Registered, _: Optional[Dog]) -> Dog: return Dog( id=event.originator_id, version=event.originator_version, @@ -103,7 +107,7 @@ def _(cls, event: Registered, _: Optional["Dog"]) -> "Dog": @mutate.register @classmethod - def _(cls, event: TrickAdded, aggregate: Optional["Dog"]) -> "Dog": + def _(cls, event: Dog.TrickAdded, aggregate: Optional[Dog]) -> Dog: assert aggregate is not None return Dog( id=aggregate.id, @@ -116,7 +120,7 @@ def _(cls, event: TrickAdded, aggregate: Optional["Dog"]) -> "Dog": @mutate.register @classmethod - def _(cls, event: Snapshot, _: Optional["Dog"]) -> "Dog": + def _(cls, event: Snapshot, _: Optional[Dog]) -> Dog: return Dog( id=event.state["id"], version=event.state["version"], diff --git a/eventsourcing/examples/cargoshipping/domainmodel.py b/eventsourcing/examples/cargoshipping/domainmodel.py index fd92df01..b41987ed 100644 --- a/eventsourcing/examples/cargoshipping/domainmodel.py +++ b/eventsourcing/examples/cargoshipping/domainmodel.py @@ -224,7 +224,7 @@ class DestinationChanged(Event): destination: Location @when.register - def _(self, event: DestinationChanged) -> None: + def _(self, event: Cargo.DestinationChanged) -> None: self._destination = event.destination def assign_route(self, itinerary: Itinerary) -> None: @@ -234,7 +234,7 @@ class RouteAssigned(Event): route: Itinerary 
@when.register - def _(self, event: RouteAssigned) -> None: + def _(self, event: Cargo.RouteAssigned) -> None: self._route = event.route self._routing_status = "ROUTED" self._estimated_time_of_arrival = Cargo.Event.create_timestamp() + timedelta( @@ -265,7 +265,7 @@ class HandlingEventRegistered(Event): handling_activity: str @when.register - def _(self, event: HandlingEventRegistered) -> None: + def _(self, event: Cargo.HandlingEventRegistered) -> None: assert self.route is not None if event.handling_activity == HandlingActivity.RECEIVE: self._transport_status = "IN_PORT" From 8a2944fb6d9fb226f1f1989e511d37683eeeb236 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sat, 20 Jan 2024 23:55:08 +0000 Subject: [PATCH 103/107] Increased version to 9.2.22. --- docs/topics/release_notes.rst | 12 ++++++++++++ eventsourcing/__init__.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/topics/release_notes.rst b/docs/topics/release_notes.rst index f9815afc..9c9565d1 100644 --- a/docs/topics/release_notes.rst +++ b/docs/topics/release_notes.rst @@ -23,6 +23,18 @@ the underlying principles are the same, and so conversion of code and stored events is very possible. +Version 9.2.22 (released 20 Jan 2024) +------------------------------------- + +* Changed all modules to have "from __future__ import annotations" and adjusted + singledispatchmethod so that when forward refs cause method registration to fail, + the registration can be deferred until the method call is dispatched. +* Improved support for Pydantic (compatibility with Pydantic v2). +* Improved docs (fixed typos in Tutorial Part 2, content management system example, + better wording in module docs, added Tutorial Part 4, fixed docstrings). +* Added checks for compatibility with Python 3.12. 
+ + Version 9.2.21 (released 13 Oct 2023) ------------------------------------- diff --git a/eventsourcing/__init__.py b/eventsourcing/__init__.py index 7358d7c2..44bf5ac5 100644 --- a/eventsourcing/__init__.py +++ b/eventsourcing/__init__.py @@ -1 +1 @@ -__version__ = "9.2.21" +__version__ = "9.2.22" From 9badb998cb6ff1fea2052a6ed0177d3a28244817 Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sun, 21 Jan 2024 00:05:33 +0000 Subject: [PATCH 104/107] Adjusted docs dependency versions. --- Makefile | 2 +- setup.py | 41 +++++++++++++++++++++++++++++++++++++++-- 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 20c289ca..1e13578b 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ DOTENV_FILE ?= dev/.env install: @pip install -U pip @pip install wheel - @pip install -e ".[dev]" + @pip install -U -e ".[dev]" .PHONY: docker-pull docker-pull: diff --git a/setup.py b/setup.py index dcaf0792..179d3f3a 100644 --- a/setup.py +++ b/setup.py @@ -10,11 +10,48 @@ postgresql_dev_requires + crypto_requires + [ + # "Sphinx==7.2.6", + # "sphinx_rtd_theme==2.2.3", + # "sphinxcontrib-applehelp==1.0.7", + # "sphinxcontrib-devhelp==1.0.5", + # "sphinxcontrib-jsmath==1.0.1", + # "sphinxcontrib-htmlhelp==2.0.4", + # "sphinxcontrib_serializinghtml==1.1.9", + # "sphinxcontrib_qthelp==1.0.6", + # "docutils==0.20.1", + # # "sphinxcontrib-serializinghtml==1.1.4", + "Sphinx==4.2.0", + "docutils==0.17.1", "sphinx_rtd_theme==1.3.0", - "orjson", - "pydantic", + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-jquery==4.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "Jinja2==3.1.2", + "Pygments==2.16.1", + "snowballstemmer==2.2.0", + "alabaster==0.7.13", + "Babel==2.13.0", + "imagesize==1.4.1", + "requests==2.31.0", + "packaging==23.2", + "MarkupSafe==2.1.3", + "charset_normalizer==3.3.0", + "idna==3.4", + "urllib3==2.0.7", + "certifi==2023.7.22", + + 
"pydantic==2.4.2", + "pydantic-core==2.10.1", + "annotated-types==0.6.0", + "typing-extensions==4.8.0", + + "orjson==3.9.9", ] + ) dev_requires = docs_requires + [ From c1109077ab5a918d11ce5697662c6115f6fa1dfe Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sun, 21 Jan 2024 04:52:37 +0000 Subject: [PATCH 105/107] Converted setup.py to pyproject.toml. --- .flake8 | 11 + .readthedocs.yaml | 24 +- Makefile | 278 ++-- docs/topics/domain.rst | 2 +- eventsourcing/tests/docs_tests/test_docs.py | 8 +- mypy.ini | 24 + poetry.lock | 1389 +++++++++++++++++++ pyproject.toml | 206 +++ setup.cfg | 42 - setup.py | 132 -- 10 files changed, 1791 insertions(+), 325 deletions(-) create mode 100644 .flake8 create mode 100644 mypy.ini create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..28289ed1 --- /dev/null +++ b/.flake8 @@ -0,0 +1,11 @@ +[flake8] +exclude = .git,__pycache__,.eggs,*.egg,.pip-cache,.poetry,.venv,dist,*_pb2.py,*_pb2_grpc.py +max-line-length = 88 +select = C,E,F,W,B,B950 +ignore = C101, E203, E501, W503, B027 + + +# ignore = E203,E266,E501,W503,B907,E231 +# max-complexity = 18 +# select = B,C,E,F,W,T4,B9 + diff --git a/.readthedocs.yaml b/.readthedocs.yaml index b99df1ce..3dbd34f2 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,6 +13,18 @@ build: # nodejs: "20" # rust: "1.70" # golang: "1.20" + jobs: + post_create_environment: + # Install poetry + # https://python-poetry.org/docs/#installing-manually + - make install-poetry + # Tell poetry to not use a virtual environment + - poetry config virtualenvs.create false + post_install: + # Install dependencies with 'docs' dependency group + # https://python-poetry.org/docs/managing-dependencies/#dependency-groups +# - poetry install --with docs + - make install-packages # Build documentation in the "docs/" directory with Sphinx sphinx: @@ -30,9 +42,9 @@ sphinx: # Optional but 
recommended, declare the Python requirements required # to build your documentation # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html -python: - install: - - method: pip - path: . - extra_requirements: - - docs +#python: +# install: +# - method: pip +# path: . +# extra_requirements: +# - docs diff --git a/Makefile b/Makefile index 1e13578b..212c44be 100644 --- a/Makefile +++ b/Makefile @@ -3,197 +3,191 @@ DOTENV_FILE ?= dev/.env -include $(DOTENV_FILE) -.PHONY: install -install: - @pip install -U pip - @pip install wheel - @pip install -U -e ".[dev]" - -.PHONY: docker-pull -docker-pull: - @docker-compose pull - -.PHONY: docker-build -docker-build: - @docker-compose build - -.PHONY: docker-up -docker-up: - @docker-compose up -d - @docker-compose ps - -.PHONY: docker-stop -docker-stop: - @docker-compose stop - -.PHONY: docker-down -docker-down: - @docker-compose down -v --remove-orphans - - -.PHONY: docker-logs -docker-logs: - @docker-compose logs --follow --tail=1000 +POETRY ?= poetry +POETRY_VERSION=1.5.1 +POETRY_INSTALLER_URL ?= https://install.python-poetry.org +PYTHONUNBUFFERED=1 +SAMPLES_LINE_LENGTH=70 + +.PHONY: install-poetry +install-poetry: + curl -sSL $(POETRY_INSTALLER_URL) | python3 + $(POETRY) --version + +.PHONY: install-packages +install-packages: + $(POETRY) --version + $(POETRY) install --no-root --all-extras -vv $(opts) + +.PHONY: update-packages +update-packages: + $(POETRY) update -vv +.PHONY: lint +lint: lint-black lint-flake8 lint-isort lint-mypy #lint-dockerfile .PHONY: lint-black lint-black: - @black --check --diff eventsourcing - @black --check --diff setup.py + $(POETRY) run black --check --diff eventsourcing .PHONY: lint-flake8 lint-flake8: - @flake8 eventsourcing + $(POETRY) run flake8 eventsourcing .PHONY: lint-isort lint-isort: - @isort --check-only --diff eventsourcing + $(POETRY) run isort --check-only --diff eventsourcing .PHONY: lint-mypy lint-mypy: - @mypy eventsourcing + $(POETRY) run mypy eventsourcing 
-.PHONY: lint-dockerfile -lint-dockerfile: - @docker run --rm -i replicated/dockerfilelint:ad65813 < ./dev/Dockerfile_eventsourcing_requirements -.PHONY: lint -lint: lint-isort lint-black lint-flake8 lint-mypy #lint-dockerfile +# .PHONY: lint-dockerfile +# lint-dockerfile: +# @docker run --rm -i replicated/dockerfilelint:ad65813 < ./dev/Dockerfile_eventsourcing_requirements +# +.PHONY: fmt +fmt: fmt-isort fmt-black + +.PHONY: fmt-black +fmt-black: + $(POETRY) run black eventsourcing .PHONY: fmt-isort fmt-isort: - @isort eventsourcing + $(POETRY) run isort eventsourcing -.PHONY: fmt-black -fmt-black: - @black eventsourcing - @black setup.py -.PHONY: fmt -fmt: fmt-isort fmt-black +.PHONY: test +test: coveragetest coverage100 timeit + +.PHONY: coveragetest +coveragetest: + $(POETRY) run coverage run -m unittest discover . -v + +.PHONY: coverage100 +coverage100: + $(POETRY) run coverage report --fail-under=100 --show-missing .PHONY: unittest unittest: - @python -m unittest discover . -v + $(POETRY) run python -m unittest discover . 
-v + + .PHONY: timeit timeit: timeit_popo timeit_sqlite timeit_postgres .PHONY: timeit_popo timeit_popo: - TEST_TIMEIT_FACTOR=500 python -m unittest eventsourcing.tests.application_tests.test_application_with_popo + TEST_TIMEIT_FACTOR=500 $(POETRY) run python -m unittest eventsourcing.tests.application_tests.test_application_with_popo .PHONY: timeit_sqlite timeit_sqlite: - TEST_TIMEIT_FACTOR=500 python -m unittest eventsourcing.tests.application_tests.test_application_with_sqlite + TEST_TIMEIT_FACTOR=500 $(POETRY) run python -m unittest eventsourcing.tests.application_tests.test_application_with_sqlite .PHONY: timeit_postgres timeit_postgres: - TEST_TIMEIT_FACTOR=500 python -m unittest eventsourcing.tests.application_tests.test_application_with_postgres - -.PHONY: rate -rate: rate_popo rate_sqlite rate_postgres - -.PHONY: rate_popo -rate_popo: - python -m unittest eventsourcing.tests.persistence_tests.test_popo.TestPOPOApplicationRecorder.test_concurrent_throughput + TEST_TIMEIT_FACTOR=500 $(POETRY) run python -m unittest eventsourcing.tests.application_tests.test_application_with_postgres -.PHONY: rate_sqlite -rate_sqlite: - python -m unittest eventsourcing.tests.persistence_tests.test_sqlite.TestSQLiteApplicationRecorder.test_concurrent_throughput - python -m unittest eventsourcing.tests.persistence_tests.test_sqlite.TestSQLiteApplicationRecorder.test_concurrent_throughput_in_memory_db +.PHONY: build +build: + $(POETRY) build +# $(POETRY) build -f sdist # build source distribution only -.PHONY: rate_postgres -rate_postgres: - python -m unittest eventsourcing.tests.persistence_tests.test_postgres.TestPostgresApplicationRecorder.test_concurrent_throughput +.PHONY: publish +publish: + $(POETRY) publish -.PHONY: coveragetest -coveragetest: - @coverage run -m unittest discover . 
-v -# @coverage run \ -# --concurrency=multiprocessing \ -# -m unittest discover \ - eventsourcing -vv --failfast -# @coverage combine -# @coverage report -# @coverage html +.PHONY: docker-pull +docker-pull: + @docker-compose pull -.PHONY: coverage100 -coverage100: - @coverage report --fail-under=100 --show-missing +.PHONY: docker-build +docker-build: + @docker-compose build -.PHONY: coveragehtml -coveragehtml: - @coverage html +.PHONY: docker-up +docker-up: + @docker-compose up -d + @docker-compose ps -.PHONY: test -test: coveragetest coverage100 timeit +.PHONY: docker-stop +docker-stop: + @docker-compose stop -.PHONY: coverage -coverage: coveragetest coveragehtml coverage100 +.PHONY: docker-down +docker-down: + @docker-compose down -v --remove-orphans -.PHONY: prepush -prepush: drop_postgres_db create_postgres_db updatetools lint docs test -.PHONY: drop_postgres_db -drop_postgres_db: - dropdb eventsourcing +.PHONY: docker-logs +docker-logs: + @docker-compose logs --follow --tail=1000 -.PHONY: create_postgres_db -create_postgres_db: - createdb eventsourcing - psql eventsourcing -c "CREATE SCHEMA myschema AUTHORIZATION eventsourcing" -.PHONY: updatetools -updatetools: - pip install -U pip - pip install -U black mypy flake8 flake8-bugbear isort python-coveralls coverage orjson pydantic +# +# +# .PHONY: coverage +# coverage: coveragetest coveragehtml coverage100 +# +# .PHONY: prepush +# prepush: drop_postgres_db create_postgres_db updatetools lint docs test +# +# .PHONY: drop_postgres_db +# drop_postgres_db: +# dropdb eventsourcing +# +# .PHONY: create_postgres_db +# create_postgres_db: +# createdb eventsourcing +# psql eventsourcing -c "CREATE SCHEMA myschema AUTHORIZATION eventsourcing" +# +# .PHONY: updatetools +# updatetools: +# pip install -U pip +# pip install -U black mypy flake8 flake8-bugbear isort python-coveralls coverage orjson pydantic +# .PHONY: docs docs: cd docs && make html - -.PHONY: brew-services-start -brew-services-start: -# brew services start 
mysql - brew services start postgresql -# brew services start redis -# ~/axonserver/axonserver.jar & -# cassandra -f & - - -.PHONY: brew-services-stop -brew-services-stop: -# brew services stop mysql || echo "Mysql couldn't be stopped" - brew services stop postgresql || echo "PostgreSQL couldn't be stopped" -# brew services stop redis || echo "Redis couldn't be stopped" -# pkill -15 java - - -.PHONY: prepare-dist -prepare-dist: - python ./dev/prepare-distribution.py - - -.PHONY: release-dist -release-dist: - python ./dev/release-distribution.py - - -.PHONY: test-released-distribution -test-released-distribution: - python ./dev/test-released-distribution.py - -#.PHONY: generate-grpc-protos -#generate-grpc-protos: -# python -m grpc_tools.protoc \ -# --proto_path=./eventsourcing/system/grpc \ -# --python_out=eventsourcing/system/grpc \ -# --grpc_python_out=eventsourcing/system/grpc \ -# eventsourcing/system/grpc/processor.proto - -.PHONY: ramdisk -ramdisk: - diskutil erasevolume HFS+ 'RAM Disk' `hdiutil attach -nobrowse -nomount ram://204800` +# +# .PHONY: brew-services-start +# brew-services-start: +# # brew services start mysql +# brew services start postgresql +# # brew services start redis +# # ~/axonserver/axonserver.jar & +# # cassandra -f & +# +# +# .PHONY: brew-services-stop +# brew-services-stop: +# # brew services stop mysql || echo "Mysql couldn't be stopped" +# brew services stop postgresql || echo "PostgreSQL couldn't be stopped" +# # brew services stop redis || echo "Redis couldn't be stopped" +# # pkill -15 java +# +# +# .PHONY: prepare-dist +# prepare-dist: +# python ./dev/prepare-distribution.py +# +# +# .PHONY: release-dist +# release-dist: +# python ./dev/release-distribution.py +# +# +# .PHONY: test-released-distribution +# test-released-distribution: +# python ./dev/test-released-distribution.py +# +# +# .PHONY: ramdisk +# ramdisk: +# diskutil erasevolume HFS+ 'RAM Disk' `hdiutil attach -nobrowse -nomount ram://204800` diff --git 
a/docs/topics/domain.rst b/docs/topics/domain.rst index aab53c5d..8590d4ff 100644 --- a/docs/topics/domain.rst +++ b/docs/topics/domain.rst @@ -2622,7 +2622,7 @@ See the examples below. # Current topics resolve. - assert get_topic(MyAggregate) == "__main__:MyAggregate" + assert get_topic(MyAggregate) == "__main__:MyAggregate", get_topic(MyAggregate) assert resolve_topic("__main__:MyAggregate") == MyAggregate assert resolve_topic("__main__:MyAggregate.Started") == MyAggregate.Started diff --git a/eventsourcing/tests/docs_tests/test_docs.py b/eventsourcing/tests/docs_tests/test_docs.py index 87cd8539..deaf5c20 100644 --- a/eventsourcing/tests/docs_tests/test_docs.py +++ b/eventsourcing/tests/docs_tests/test_docs.py @@ -9,6 +9,7 @@ from eventsourcing.postgres import PostgresDatastore from eventsourcing.tests.persistence import tmpfile_uris from eventsourcing.tests.postgres_utils import drop_postgres_table +from eventsourcing.utils import clear_topic_cache base_dir = dirname(dirname(os.path.abspath(eventsourcing.__file__))) @@ -33,6 +34,7 @@ def tearDown(self) -> None: self.clean_env() def clean_env(self): + clear_topic_cache() db = PostgresDatastore( "eventsourcing", "127.0.0.1", @@ -260,14 +262,16 @@ def check_code_snippets_in_file(self, doc_path): print("{} lines of code in {}".format(num_code_lines, doc_path)) + source = "\n".join(lines) + "\n" + # Write the code into a temp file. tempfile = NamedTemporaryFile("w+") temp_path = tempfile.name - tempfile.writelines("\n".join(lines) + "\n") + tempfile.writelines(source) tempfile.flush() # Run the code and catch errors. 
- p = Popen([sys.executable, temp_path], stdout=PIPE, stderr=PIPE) + p = Popen([sys.executable, temp_path], stdout=PIPE, stderr=PIPE, env={"PYTHONPATH": base_dir}) out, err = p.communicate() out = out.decode("utf8") err = err.decode("utf8") diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..92d37d18 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,24 @@ +[mypy] +python_version = 3.8 +check_untyped_defs = True +no_implicit_reexport = True + +ignore_missing_imports = True +incremental = True +follow_imports = normal +warn_redundant_casts = True +warn_unused_ignores = True +strict_optional = True +no_implicit_optional = True +disallow_untyped_defs = True +disallow_any_generics = True + +[mypy-eventsourcing.tests.*] +ignore_errors = True + +[mypy-eventsourcing.tests.application_tests.test_event_sourced_log] +ignore_errors = False + +[mypy-eventsourcing.dispatch] +ignore_errors = True + diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..9ac41044 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1389 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+ +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = true +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = true +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.13.0" +description = "Internationalization utilities" +optional = true +python-versions = ">=3.7" +files = [ + 
{file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, + {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = 
"backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] + +[[package]] +name = "black" +version = "24.1a1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.1a1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d139b9531e6bb6d129497a46475535d8289dddc861a5b980f908c36597b9817"}, + {file = "black-24.1a1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2220c470c22476ca9631337b0daae41be2b215599919b19d576a956ad38aca69"}, + {file = "black-24.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a12829e372563ffff10c18c7aff1ef274da6afbc7bc8ccdb5fcc8ff84cab43f"}, + {file = "black-24.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:d47b6530c55c092a9d841a12c8b3ad838bd639bebf6660a3df9dae83d4ab83c1"}, + {file = "black-24.1a1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b594b3ede60182215d258c76de2de64712d2e8424442ff4402276e22684abbe"}, + {file = "black-24.1a1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915a6b6b916fc66edec886fc71b60284e447d8fa39d22b879af7ae6efccca90f"}, + {file = "black-24.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0a7ea9aa1c108924e31f1204a1e2534af255dbaa24ecbb8c05f47341a7b6f1"}, + {file = "black-24.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:41c0ce5cbdb701900c166bcca08ac941b64cf1d6967509e3caeab126da0ae0d0"}, + {file = "black-24.1a1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c8165fad00b03d9c1d400b1dd250479792f49d012807ee45162d323d04fc06"}, + {file = "black-24.1a1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e3c74b35ea179bb69440286b81c309a64c34a032746a9eef3399dc3ce671352"}, + {file = "black-24.1a1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30a018fc03fd1e83c75d40b8a156ef541d0b56b6403b63754e1cc96889849d9"}, + {file = "black-24.1a1-cp312-cp312-win_amd64.whl", hash = "sha256:88d1c60bac2044a409154e895abb9d74c8ff5d034fb70f3e1f7c3ae96206bc0c"}, + {file = "black-24.1a1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4de8ba5825588017f90e63d7a25fc4df33a6342d1f4d628ad76130d8f4488fc6"}, + {file = 
"black-24.1a1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c86ecd9d3da3d91e96da5f4a43d9c4fe35c5698b0633e91f171ba9468d112a8b"}, + {file = "black-24.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:623efdb54e7290ba75f7b822dfd2d8a47a55e721ae63aab671ccfd46b2ba6c5d"}, + {file = "black-24.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ec345caf15ae2c61540812500979e92f2989c6b6d4d13d21bdc82908043b3265"}, + {file = "black-24.1a1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac226f37fc429b386d6447df6256dc958c28dd602f86f950072febf886995f80"}, + {file = "black-24.1a1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cad114d8673adab76b3602c28c461c613b7be3da28415500e42aed47415eb561"}, + {file = "black-24.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a054dbb8947718820be2ed6953d66b912ec2795f282725efdd08381a11b0d0"}, + {file = "black-24.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:b03cdf8a4e15929adf47e5e40a0ddeea1d63b65cf59c22553c12417a0c7ccbf4"}, + {file = "black-24.1a1-py3-none-any.whl", hash = "sha256:a2c977909557439d0f17dc82adaea84e48374950d53416efc0b8451a594d42c3"}, + {file = "black-24.1a1.tar.gz", hash = "sha256:4a159ae57f239f3f1ef6a78784b00c1c617c7bb188cc351b3017b9e0702df11c"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = 
"charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + 
+[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash 
= "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = 
">=2.5.0,<2.6.0" + +[[package]] +name = "flake8-broken-line" +version = "1.0.0" +description = "Flake8 plugin to forbid backslashes for line breaks" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "flake8_broken_line-1.0.0-py3-none-any.whl", hash = "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9"}, + {file = "flake8_broken_line-1.0.0.tar.gz", hash = "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85"}, +] + +[package.dependencies] +flake8 = ">5" + +[[package]] +name = "flake8-bugbear" +version = "23.3.12" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-bugbear-23.3.12.tar.gz", hash = "sha256:e3e7f74c8a49ad3794a7183353026dabd68c74030d5f46571f84c1fb0eb79363"}, + {file = "flake8_bugbear-23.3.12-py3-none-any.whl", hash = "sha256:beb5c7efcd7ccc2039ef66a77bb8db925e7be3531ff1cb4d0b7030d0e2113d72"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] + +[[package]] +name = "flake8-coding" +version = "1.3.2" +description = "Adds coding magic comment checks to flake8" +optional = false +python-versions = "*" +files = [ + {file = "flake8-coding-1.3.2.tar.gz", hash = "sha256:b8f4d5157a8f74670e6cfea732c3d9f4291a4e994c8701d2c55f787c6e6cb741"}, + {file = "flake8_coding-1.3.2-py2.py3-none-any.whl", hash = "sha256:79704112c44d09d4ab6c8965e76a20c3f7073d52146db60303bce777d9612260"}, +] + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-isort" +version = "6.1.1" +description = "flake8 plugin that integrates isort" +optional = false +python-versions = ">=3.8" +files = [ + {file = "flake8_isort-6.1.1-py3-none-any.whl", hash = 
"sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12"}, + {file = "flake8_isort-6.1.1.tar.gz", hash = "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3"}, +] + +[package.dependencies] +flake8 = "*" +isort = ">=5.0.0,<6" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "flake8-tidy-imports" +version = "4.10.0" +description = "A flake8 plugin that helps you write tidier imports." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flake8_tidy_imports-4.10.0-py3-none-any.whl", hash = "sha256:b0387fb2ea200441bd142309e716fb7b8f4b0937bdf5f8b7c0c118a5f5e2b8ed"}, + {file = "flake8_tidy_imports-4.10.0.tar.gz", hash = "sha256:bd6cf86465402d2b86903009b748d85a628e599e17b76e810c9857e3a2815173"}, +] + +[package.dependencies] +flake8 = ">=3.8.0" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = true +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = true +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", 
hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = 
"mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "orjson" +version = "3.9.9" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = true +python-versions = ">=3.8" +files = [ + {file = "orjson-3.9.9-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f28090060a31f4d11221f9ba48b2273b0d04b702f4dcaa197c38c64ce639cc51"}, + {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8038ba245d0c0a6337cfb6747ea0c51fe18b0cf1a4bc943d530fd66799fae33d"}, + {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:543b36df56db195739c70d645ecd43e49b44d5ead5f8f645d2782af118249b37"}, + {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e7877256b5092f1e4e48fc0f1004728dc6901e7a4ffaa4acb0a9578610aa4ce"}, + {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b83e0d8ba4ca88b894c3e00efc59fe6d53d9ffb5dbbb79d437a466fc1a513d"}, + {file = "orjson-3.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef06431f021453a47a9abb7f7853f04f031d31fbdfe1cc83e3c6aadde502cce"}, + {file = "orjson-3.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0a1a4d9e64597e550428ba091e51a4bcddc7a335c8f9297effbfa67078972b5c"}, + {file = "orjson-3.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:879d2d1f6085c9c0831cec6716c63aaa89e41d8e036cabb19a315498c173fcc6"}, + {file = "orjson-3.9.9-cp310-none-win32.whl", hash = "sha256:d3f56e41bc79d30fdf077073072f2377d2ebf0b946b01f2009ab58b08907bc28"}, + {file = 
"orjson-3.9.9-cp310-none-win_amd64.whl", hash = "sha256:ab7bae2b8bf17620ed381e4101aeeb64b3ba2a45fc74c7617c633a923cb0f169"}, + {file = "orjson-3.9.9-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:31d676bc236f6e919d100fb85d0a99812cff1ebffaa58106eaaec9399693e227"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:678ffb5c0a6b1518b149cc328c610615d70d9297e351e12c01d0beed5d65360f"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71b0cc21f2c324747bc77c35161e0438e3b5e72db6d3b515310457aba743f7f"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae72621f216d1d990468291b1ec153e1b46e0ed188a86d54e0941f3dabd09ee8"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:512e5a41af008e76451f5a344941d61f48dddcf7d7ddd3073deb555de64596a6"}, + {file = "orjson-3.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f89dc338a12f4357f5bf1b098d3dea6072fb0b643fd35fec556f4941b31ae27"}, + {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:957a45fb201c61b78bcf655a16afbe8a36c2c27f18a998bd6b5d8a35e358d4ad"}, + {file = "orjson-3.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1c01cf4b8e00c7e98a0a7cf606a30a26c32adf2560be2d7d5d6766d6f474b31"}, + {file = "orjson-3.9.9-cp311-none-win32.whl", hash = "sha256:397a185e5dd7f8ebe88a063fe13e34d61d394ebb8c70a443cee7661b9c89bda7"}, + {file = "orjson-3.9.9-cp311-none-win_amd64.whl", hash = "sha256:24301f2d99d670ded4fb5e2f87643bc7428a54ba49176e38deb2887e42fe82fb"}, + {file = "orjson-3.9.9-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd55ea5cce3addc03f8fb0705be0cfed63b048acc4f20914ce5e1375b15a293b"}, + {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b28c1a65cd13fff5958ab8b350f0921121691464a7a1752936b06ed25c0c7b6e"}, + {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b97a67c47840467ccf116136450c50b6ed4e16a8919c81a4b4faef71e0a2b3f4"}, + {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75b805549cbbcb963e9c9068f1a05abd0ea4c34edc81f8d8ef2edb7e139e5b0f"}, + {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5424ecbafe57b2de30d3b5736c5d5835064d522185516a372eea069b92786ba6"}, + {file = "orjson-3.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2cd6ef4726ef1b8c63e30d8287225a383dbd1de3424d287b37c1906d8d2855"}, + {file = "orjson-3.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c959550e0705dc9f59de8fca1a316da0d9b115991806b217c82931ac81d75f74"}, + {file = "orjson-3.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ece2d8ed4c34903e7f1b64fb1e448a00e919a4cdb104fc713ad34b055b665fca"}, + {file = "orjson-3.9.9-cp312-none-win_amd64.whl", hash = "sha256:f708ca623287186e5876256cb30599308bce9b2757f90d917b7186de54ce6547"}, + {file = "orjson-3.9.9-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:335406231f9247f985df045f0c0c8f6b6d5d6b3ff17b41a57c1e8ef1a31b4d04"}, + {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b5440a5d215d9e1cfd4aee35fd4101a8b8ceb8329f549c16e3894ed9f18b5"}, + {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e98ca450cb4fb176dd572ce28c6623de6923752c70556be4ef79764505320acb"}, + {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3bf6ca6bce22eb89dd0650ef49c77341440def966abcb7a2d01de8453df083a"}, + {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:eb50d869b3c97c7c5187eda3759e8eb15deb1271d694bc5d6ba7040db9e29036"}, + {file = "orjson-3.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fcf06c69ccc78e32d9f28aa382ab2ab08bf54b696dbe00ee566808fdf05da7d"}, + {file = "orjson-3.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9a4402e7df1b5c9a4c71c7892e1c8f43f642371d13c73242bda5964be6231f95"}, + {file = "orjson-3.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b20becf50d4aec7114dc902b58d85c6431b3a59b04caa977e6ce67b6fee0e159"}, + {file = "orjson-3.9.9-cp38-none-win32.whl", hash = "sha256:1f352117eccac268a59fedac884b0518347f5e2b55b9f650c2463dd1e732eb61"}, + {file = "orjson-3.9.9-cp38-none-win_amd64.whl", hash = "sha256:c4eb31a8e8a5e1d9af5aa9e247c2a52ad5cf7e968aaa9aaefdff98cfcc7f2e37"}, + {file = "orjson-3.9.9-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4a308aeac326c2bafbca9abbae1e1fcf682b06e78a54dad0347b760525838d85"}, + {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e159b97f5676dcdac0d0f75ec856ef5851707f61d262851eb41a30e8fadad7c9"}, + {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f692e7aabad92fa0fff5b13a846fb586b02109475652207ec96733a085019d80"}, + {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cffb77cf0cd3cbf20eb603f932e0dde51b45134bdd2d439c9f57924581bb395b"}, + {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c63eca397127ebf46b59c9c1fb77b30dd7a8fc808ac385e7a58a7e64bae6e106"}, + {file = "orjson-3.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f0c024a75e8ba5d9101facb4fb5a028cdabe3cdfe081534f2a9de0d5062af2"}, + {file = "orjson-3.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8cba20c9815c2a003b8ca4429b0ad4aa87cb6649af41365821249f0fd397148e"}, + {file = 
"orjson-3.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:906cac73b7818c20cf0f6a7dde5a6f009c52aecc318416c7af5ea37f15ca7e66"}, + {file = "orjson-3.9.9-cp39-none-win32.whl", hash = "sha256:50232572dd300c49f134838c8e7e0917f29a91f97dbd608d23f2895248464b7f"}, + {file = "orjson-3.9.9-cp39-none-win_amd64.whl", hash = "sha256:920814e02e3dd7af12f0262bbc18b9fe353f75a0d0c237f6a67d270da1a1bb44"}, + {file = "orjson-3.9.9.tar.gz", hash = "sha256:02e693843c2959befdd82d1ebae8b05ed12d1cb821605d5f9fe9f98ca5c9fd2b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = true +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash 
= "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = true +python-versions = ">=3.7" +files = [ + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = 
"psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + 
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = 
"psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, +] + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = 
"sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] + +[[package]] +name = "pycryptodome" +version = "3.16.0" +description = "Cryptographic library for Python" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodome-3.16.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e061311b02cefb17ea93d4a5eb1ad36dca4792037078b43e15a653a0a4478ead"}, + {file = "pycryptodome-3.16.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:dab9359cc295160ba96738ba4912c675181c84bfdf413e5c0621cf00b7deeeaa"}, + {file = "pycryptodome-3.16.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:0198fe96c22f7bc31e7a7c27a26b2cec5af3cf6075d577295f4850856c77af32"}, + {file = "pycryptodome-3.16.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:58172080cbfaee724067a3c017add6a1a3cc167bbc8478dc5f2e5f45fa658763"}, + {file = "pycryptodome-3.16.0-cp27-cp27m-win32.whl", hash = "sha256:4d950ed2a887905b3fa709b86be5a163e26e1b174703ed59d34eb6832f213222"}, + {file = "pycryptodome-3.16.0-cp27-cp27m-win_amd64.whl", hash = "sha256:c69e19afc734b2a17b9d78b7bcb544aabd5a52ff628e14283b6e9404d27d0517"}, + {file = "pycryptodome-3.16.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1fc16c80a5da8231fd1f953a7b8dfeb415f68120248e8d68383c5c2c4b18708c"}, + {file = "pycryptodome-3.16.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5df582f2112dd72331de7e567837e136a9629181a8ab69ef8949e4bc294a0b99"}, + {file = "pycryptodome-3.16.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:2bf2a270906a02b7b255e1a0d7b3aea4f06b3983c51ddec1673c380e0dff5b30"}, + {file = "pycryptodome-3.16.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b12a88566a98617b1a34b4e5a805dff2da98d83fc74262aff3c3d724d0f525d6"}, + {file = "pycryptodome-3.16.0-cp35-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:69adf32522b75968e1cbf25b5d83e87c04cd9a55610ce1e4a19012e58e7e4023"}, + {file = "pycryptodome-3.16.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d67a2d2fe344953e4572a7d30668cceb516b04287b8638170d562065e53ee2e0"}, + {file = "pycryptodome-3.16.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e750a21d8a265b1f9bfb1a28822995ea33511ba7db5e2b55f41fb30781d0d073"}, + {file = "pycryptodome-3.16.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:47c71a0347847b747ba1349767b16cde049bc36f21654eb09cc82306ef5fdcf8"}, + {file = "pycryptodome-3.16.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:856ebf822d08d754af62c22e2b93626509a72773214f92db1551e2b68d9e2a1b"}, + {file = "pycryptodome-3.16.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6016269bb56caf0327f6d42e7bad1247e08b78407446dff562240c65f85d5a5e"}, + {file = "pycryptodome-3.16.0-cp35-abi3-win32.whl", hash = "sha256:1047ac2b9847ae84ea454e6e20db7dcb755a81c1b1631a879213d2b0ad835ff2"}, + {file = "pycryptodome-3.16.0-cp35-abi3-win_amd64.whl", hash = "sha256:13b3e610a2f8938c61a90b20625069ab7a77ccea20d65a9a0f926cc0cc1314b1"}, + {file = "pycryptodome-3.16.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:265bfcbbf20d58e6871ce695a7a08aac9b41a0553060d9c05363abd6f3391bdd"}, + {file = "pycryptodome-3.16.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:54d807314c66785c69cd25425933d4bd4c23547a593cdcf49d962fa3e0081336"}, + {file = "pycryptodome-3.16.0-pp27-pypy_73-win32.whl", hash = "sha256:63165fbdc247450017eb9ef04cfe15cb3a72ca48ffcc3a3b75b08c0340bf3647"}, + {file = "pycryptodome-3.16.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:95069fd9e2813668a2713a1efcc65cc26d2c7e741401ac46628f1ec957511f1b"}, + {file = "pycryptodome-3.16.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:d1daec4d31bb00918e4e178297ac6ca6f86ec4c851ba584770533ece554d29e2"}, + {file = "pycryptodome-3.16.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:48d99869d58f3979d72f6fa0c50f48d16f14973bc4a3adb0ce3b8325fdd7e223"}, + {file = "pycryptodome-3.16.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c82e3bc1e70dde153b0956bffe20a15715a1fe3e00bc23e88d6973eda4505944"}, + {file = "pycryptodome-3.16.0.tar.gz", hash = "sha256:0e45d2d852a66ecfb904f090c3f87dc0dfb89a499570abad8590f10d9cffb350"}, +] + +[[package]] +name = "pydantic" +version = "2.4.2" +description = "Data validation using Python type hints" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.4.2-py3-none-any.whl", hash = "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"}, + {file = "pydantic-2.4.2.tar.gz", hash = "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.10.1" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.10.1" +description = "" +optional = true +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:d64728ee14e667ba27c66314b7d880b8eeb050e58ffc5fec3b7a109f8cddbd63"}, + {file = "pydantic_core-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:48525933fea744a3e7464c19bfede85df4aba79ce90c60b94d8b6e1eddd67096"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef337945bbd76cce390d1b2496ccf9f90b1c1242a3a7bc242ca4a9fc5993427a"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1392e0638af203cee360495fd2cfdd6054711f2db5175b6e9c3c461b76f5175"}, + {file = 
"pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0675ba5d22de54d07bccde38997e780044dcfa9a71aac9fd7d4d7a1d2e3e65f7"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128552af70a64660f21cb0eb4876cbdadf1a1f9d5de820fed6421fa8de07c893"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f6e6aed5818c264412ac0598b581a002a9f050cb2637a84979859e70197aa9e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ecaac27da855b8d73f92123e5f03612b04c5632fd0a476e469dfc47cd37d6b2e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3c01c2fb081fced3bbb3da78510693dc7121bb893a1f0f5f4b48013201f362e"}, + {file = "pydantic_core-2.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:92f675fefa977625105708492850bcbc1182bfc3e997f8eecb866d1927c98ae6"}, + {file = "pydantic_core-2.10.1-cp310-none-win32.whl", hash = "sha256:420a692b547736a8d8703c39ea935ab5d8f0d2573f8f123b0a294e49a73f214b"}, + {file = "pydantic_core-2.10.1-cp310-none-win_amd64.whl", hash = "sha256:0880e239827b4b5b3e2ce05e6b766a7414e5f5aedc4523be6b68cfbc7f61c5d0"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:073d4a470b195d2b2245d0343569aac7e979d3a0dcce6c7d2af6d8a920ad0bea"}, + {file = "pydantic_core-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:600d04a7b342363058b9190d4e929a8e2e715c5682a70cc37d5ded1e0dd370b4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39215d809470f4c8d1881758575b2abfb80174a9e8daf8f33b1d4379357e417c"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eeb3d3d6b399ffe55f9a04e09e635554012f1980696d6b0aca3e6cf42a17a03b"}, + {file = 
"pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a7902bf75779bc12ccfc508bfb7a4c47063f748ea3de87135d433a4cca7a2f"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3625578b6010c65964d177626fde80cf60d7f2e297d56b925cb5cdeda6e9925a"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa48fc31fc7243e50188197b5f0c4228956f97b954f76da157aae7f67269ae8"}, + {file = "pydantic_core-2.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07ec6d7d929ae9c68f716195ce15e745b3e8fa122fc67698ac6498d802ed0fa4"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6f31a17acede6a8cd1ae2d123ce04d8cca74056c9d456075f4f6f85de055607"}, + {file = "pydantic_core-2.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d8f1ebca515a03e5654f88411420fea6380fc841d1bea08effb28184e3d4899f"}, + {file = "pydantic_core-2.10.1-cp311-none-win32.whl", hash = "sha256:6db2eb9654a85ada248afa5a6db5ff1cf0f7b16043a6b070adc4a5be68c716d6"}, + {file = "pydantic_core-2.10.1-cp311-none-win_amd64.whl", hash = "sha256:4a5be350f922430997f240d25f8219f93b0c81e15f7b30b868b2fddfc2d05f27"}, + {file = "pydantic_core-2.10.1-cp311-none-win_arm64.whl", hash = "sha256:5fdb39f67c779b183b0c853cd6b45f7db84b84e0571b3ef1c89cdb1dfc367325"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1f22a9ab44de5f082216270552aa54259db20189e68fc12484873d926426921"}, + {file = "pydantic_core-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8572cadbf4cfa95fb4187775b5ade2eaa93511f07947b38f4cd67cf10783b118"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9a28c063c7c00844ae42a80203eb6d2d6bbb97070cfa00194dff40e6f545ab"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:0e2a35baa428181cb2270a15864ec6286822d3576f2ed0f4cd7f0c1708472aff"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05560ab976012bf40f25d5225a58bfa649bb897b87192a36c6fef1ab132540d7"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6495008733c7521a89422d7a68efa0a0122c99a5861f06020ef5b1f51f9ba7c"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ac492c686defc8e6133e3a2d9eaf5261b3df26b8ae97450c1647286750b901"}, + {file = "pydantic_core-2.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8282bab177a9a3081fd3d0a0175a07a1e2bfb7fcbbd949519ea0980f8a07144d"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:aafdb89fdeb5fe165043896817eccd6434aee124d5ee9b354f92cd574ba5e78f"}, + {file = "pydantic_core-2.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f6defd966ca3b187ec6c366604e9296f585021d922e666b99c47e78738b5666c"}, + {file = "pydantic_core-2.10.1-cp312-none-win32.whl", hash = "sha256:7c4d1894fe112b0864c1fa75dffa045720a194b227bed12f4be7f6045b25209f"}, + {file = "pydantic_core-2.10.1-cp312-none-win_amd64.whl", hash = "sha256:5994985da903d0b8a08e4935c46ed8daf5be1cf217489e673910951dc533d430"}, + {file = "pydantic_core-2.10.1-cp312-none-win_arm64.whl", hash = "sha256:0d8a8adef23d86d8eceed3e32e9cca8879c7481c183f84ed1a8edc7df073af94"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9badf8d45171d92387410b04639d73811b785b5161ecadabf056ea14d62d4ede"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:ebedb45b9feb7258fac0a268a3f6bec0a2ea4d9558f3d6f813f02ff3a6dc6698"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfe1090245c078720d250d19cb05d67e21a9cd7c257698ef139bc41cf6c27b4f"}, + {file = 
"pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e357571bb0efd65fd55f18db0a2fb0ed89d0bb1d41d906b138f088933ae618bb"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3dcd587b69bbf54fc04ca157c2323b8911033e827fffaecf0cafa5a892a0904"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c120c9ce3b163b985a3b966bb701114beb1da4b0468b9b236fc754783d85aa3"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15d6bca84ffc966cc9976b09a18cf9543ed4d4ecbd97e7086f9ce9327ea48891"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cabb9710f09d5d2e9e2748c3e3e20d991a4c5f96ed8f1132518f54ab2967221"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:82f55187a5bebae7d81d35b1e9aaea5e169d44819789837cdd4720d768c55d15"}, + {file = "pydantic_core-2.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1d40f55222b233e98e3921df7811c27567f0e1a4411b93d4c5c0f4ce131bc42f"}, + {file = "pydantic_core-2.10.1-cp37-none-win32.whl", hash = "sha256:14e09ff0b8fe6e46b93d36a878f6e4a3a98ba5303c76bb8e716f4878a3bee92c"}, + {file = "pydantic_core-2.10.1-cp37-none-win_amd64.whl", hash = "sha256:1396e81b83516b9d5c9e26a924fa69164156c148c717131f54f586485ac3c15e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6835451b57c1b467b95ffb03a38bb75b52fb4dc2762bb1d9dbed8de31ea7d0fc"}, + {file = "pydantic_core-2.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b00bc4619f60c853556b35f83731bd817f989cba3e97dc792bb8c97941b8053a"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fa467fd300a6f046bdb248d40cd015b21b7576c168a6bb20aa22e595c8ffcdd"}, + {file = 
"pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d99277877daf2efe074eae6338453a4ed54a2d93fb4678ddfe1209a0c93a2468"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aad7bd686363d1ce4ee930ad39f14e1673248373f4a9d74d2b9554f06199fb58"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:443fed67d33aa85357464f297e3d26e570267d1af6fef1c21ca50921d2976302"}, + {file = "pydantic_core-2.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:042462d8d6ba707fd3ce9649e7bf268633a41018d6a998fb5fbacb7e928a183e"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecdbde46235f3d560b18be0cb706c8e8ad1b965e5c13bbba7450c86064e96561"}, + {file = "pydantic_core-2.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ed550ed05540c03f0e69e6d74ad58d026de61b9eaebebbaaf8873e585cbb18de"}, + {file = "pydantic_core-2.10.1-cp38-none-win32.whl", hash = "sha256:8cdbbd92154db2fec4ec973d45c565e767ddc20aa6dbaf50142676484cbff8ee"}, + {file = "pydantic_core-2.10.1-cp38-none-win_amd64.whl", hash = "sha256:9f6f3e2598604956480f6c8aa24a3384dbf6509fe995d97f6ca6103bb8c2534e"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:655f8f4c8d6a5963c9a0687793da37b9b681d9ad06f29438a3b2326d4e6b7970"}, + {file = "pydantic_core-2.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e570ffeb2170e116a5b17e83f19911020ac79d19c96f320cbfa1fa96b470185b"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64322bfa13e44c6c30c518729ef08fda6026b96d5c0be724b3c4ae4da939f875"}, + {file = 
"pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485a91abe3a07c3a8d1e082ba29254eea3e2bb13cbbd4351ea4e5a21912cc9b0"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5cb87bdc2e5f620693148b5f8f842d293cae46c5f15a1b1bf7ceeed324a740c"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25bd966103890ccfa028841a8f30cebcf5875eeac8c4bde4fe221364c92f0c9a"}, + {file = "pydantic_core-2.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f323306d0556351735b54acbf82904fe30a27b6a7147153cbe6e19aaaa2aa429"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c27f38dc4fbf07b358b2bc90edf35e82d1703e22ff2efa4af4ad5de1b3833e7"}, + {file = "pydantic_core-2.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f1365e032a477c1430cfe0cf2856679529a2331426f8081172c4a74186f1d595"}, + {file = "pydantic_core-2.10.1-cp39-none-win32.whl", hash = "sha256:a1c311fd06ab3b10805abb72109f01a134019739bd3286b8ae1bc2fc4e50c07a"}, + {file = "pydantic_core-2.10.1-cp39-none-win_amd64.whl", hash = "sha256:ae8a8843b11dc0b03b57b52793e391f0122e740de3df1474814c700d2622950a"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d43002441932f9a9ea5d6f9efaa2e21458221a3a4b417a14027a1d530201ef1b"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962ed72424bf1f72334e2f1e61b68f16c0e596f024ca7ac5daf229f7c26e4208"}, + {file = 
"pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf5bb4dd67f20f3bbc1209ef572a259027c49e5ff694fa56bed62959b41e1f9"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e544246b859f17373bed915182ab841b80849ed9cf23f1f07b73b7c58baee5fb"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c0877239307b7e69d025b73774e88e86ce82f6ba6adf98f41069d5b0b78bd1bf"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53df009d1e1ba40f696f8995683e067e3967101d4bb4ea6f667931b7d4a01357"}, + {file = "pydantic_core-2.10.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1254357f7e4c82e77c348dabf2d55f1d14d19d91ff025004775e70a6ef40ada"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:524ff0ca3baea164d6d93a32c58ac79eca9f6cf713586fdc0adb66a8cdeab96a"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0ac9fb8608dbc6eaf17956bf623c9119b4db7dbb511650910a82e261e6600f"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:320f14bd4542a04ab23747ff2c8a778bde727158b606e2661349557f0770711e"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63974d168b6233b4ed6a0046296803cb13c56637a7b8106564ab575926572a55"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:417243bf599ba1f1fef2bb8c543ceb918676954734e2dcb82bf162ae9d7bd514"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dda81e5ec82485155a19d9624cfcca9be88a405e2857354e5b089c2a982144b2"}, + {file = "pydantic_core-2.10.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:14cfbb00959259e15d684505263d5a21732b31248a5dd4941f73a3be233865b9"}, + {file = 
"pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:631cb7415225954fdcc2a024119101946793e5923f6c4d73a5914d27eb3d3a05"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:bec7dd208a4182e99c5b6c501ce0b1f49de2802448d4056091f8e630b28e9a52"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:149b8a07712f45b332faee1a2258d8ef1fb4a36f88c0c17cb687f205c5dc6e7d"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d966c47f9dd73c2d32a809d2be529112d509321c5310ebf54076812e6ecd884"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7eb037106f5c6b3b0b864ad226b0b7ab58157124161d48e4b30c4a43fef8bc4b"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:154ea7c52e32dce13065dbb20a4a6f0cc012b4f667ac90d648d36b12007fa9f7"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e562617a45b5a9da5be4abe72b971d4f00bf8555eb29bb91ec2ef2be348cd132"}, + {file = "pydantic_core-2.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f23b55eb5464468f9e0e9a9935ce3ed2a870608d5f534025cd5536bca25b1402"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:e9121b4009339b0f751955baf4543a0bfd6bc3f8188f8056b1a25a2d45099934"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:0523aeb76e03f753b58be33b26540880bac5aa54422e4462404c432230543f33"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0e2959ef5d5b8dc9ef21e1a305a21a36e254e6a34432d00c72a92fdc5ecda5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da01bec0a26befab4898ed83b362993c844b9a607a86add78604186297eb047e"}, + {file = 
"pydantic_core-2.10.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2e9072d71c1f6cfc79a36d4484c82823c560e6f5599c43c1ca6b5cdbd54f881"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f36a3489d9e28fe4b67be9992a23029c3cec0babc3bd9afb39f49844a8c721c5"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f64f82cc3443149292b32387086d02a6c7fb39b8781563e0ca7b8d7d9cf72bd7"}, + {file = "pydantic_core-2.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b4a6db486ac8e99ae696e09efc8b2b9fea67b63c8f88ba7a1a16c24a057a0776"}, + {file = "pydantic_core-2.10.1.tar.gz", hash = "sha256:0f8682dbdd2f67f8e1edddcbffcc29f60a6182b4901c367fc8c1c40d30bb0a82"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "python-coveralls" +version = "2.9.3" +description = "Python interface to coveralls.io API\n" +optional = false +python-versions = "*" +files = [ + {file = "python-coveralls-2.9.3.tar.gz", hash = "sha256:bfaf7811e7dc5628e83b6b162962a4e2485dbff184b30e49f380374ed1bcee55"}, + {file = "python_coveralls-2.9.3-py2.py3-none-any.whl", hash = "sha256:fb0ff49bb1551dac10b06bd55e9790287d898a0f1e2c959802235cae08dd0bff"}, +] + +[package.dependencies] +coverage = "*" +PyYAML = "*" +requests = "*" +six = "*" + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = true +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = true +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + 
+[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = true +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "4.2.0" +description = "Python documentation generator" +optional = true +python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.2.0-py3-none-any.whl", hash = "sha256:98a535c62a4fcfcc362528592f69b26f7caec587d32cd55688db580be0287ae0"}, + {file = "Sphinx-4.2.0.tar.gz", hash = "sha256:94078db9184491e15bce0a56d9186e0aec95f16ac20b12d00e06d4e36f1058a6"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +setuptools = "*" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.900)", "types-pkg-resources", "types-requests", 
"types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.3.0" +description = "Read the Docs theme for Sphinx" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, + {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, +] + +[package.dependencies] +docutils = "<0.19" +sphinx = ">=1.6,<8" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = true +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = true +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = true +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + 
+[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = 
"typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[extras] +crypto = ["pycryptodome"] +dev = [] +docs = ["Babel", "Jinja2", "MarkupSafe", "Pygments", "Sphinx", "alabaster", "annotated-types", "certifi", "charset_normalizer", "docutils", "idna", "imagesize", "orjson", "packaging", "pydantic", "pydantic-core", "requests", "snowballstemmer", "sphinx_rtd_theme", "sphinxcontrib-applehelp", "sphinxcontrib-devhelp", "sphinxcontrib-htmlhelp", "sphinxcontrib-jquery", "sphinxcontrib-qthelp", "sphinxcontrib-serializinghtml", "typing-extensions", "typing_extensions", "urllib3"] +postgres = ["psycopg2"] +postgres-dev = ["psycopg2-binary"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8,<4.0" +content-hash = "1dca701c3c8e93734e2bd51b93c3e1275e14dca11dfbb65dcfd41f5aae1a7238" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..3c1bfb29 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,206 @@ +[tool.poetry] +name = "eventsourcing" +version = "9.2.22" + +description = "Event sourcing in Python" +authors = [ + "John Bywater ", +] +license = "BSD 3-Clause" +classifiers = [ +# "Development Status :: 3 - Alpha", +# "Development Status :: 4 - Beta", + "Development Status :: 5 - 
Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +# "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python", + "Topic :: Software Development :: Libraries :: Python Modules", +] +readme = "README.md" +homepage = "https://github.com/pyeventsourcing/eventsourcing" +repository = "https://github.com/pyeventsourcing/eventsourcing" +include = ["eventsourcing/py.typed"] +packages = [ + { include = "eventsourcing" }, + { include = "eventsourcing/tests" }, +] +keywords=[ + "event sourcing", + "event store", + "domain driven design", + "domain-driven design", + "ddd", + "cqrs", + "cqs", +] + +[tool.poetry.dependencies] +python = ">=3.8,<4.0" +typing_extensions = "*" +"backports.zoneinfo" = { version = "*", python = "<3.9" } + +pycryptodome = { version = "<=3.16.99999", optional = true } + +psycopg2 = { version = "<=2.9.99999", optional = true } +psycopg2-binary = { version = "<=2.9.99999", optional = true } + +Sphinx = { version = "4.2.0", optional = true } +docutils = { version = "0.17.1", optional = true } +sphinx_rtd_theme = { version = "1.3.0", optional = true } +sphinxcontrib-applehelp = { version = "1.0.4", optional = true } +sphinxcontrib-devhelp = { version = "1.0.2", optional = true } +sphinxcontrib-htmlhelp = { version = "2.0.1", optional = true } +sphinxcontrib-jquery = { version = "4.1", optional = true } +sphinxcontrib-qthelp = { version = "1.0.3", optional = true } +sphinxcontrib-serializinghtml = { version = "1.1.5", optional = true } +Jinja2 = { version = "3.1.2", optional = true } +Pygments = { version = "2.16.1", 
optional = true } +snowballstemmer = { version = "2.2.0", optional = true } +alabaster = { version = "0.7.13", optional = true } +Babel = { version = "2.13.0", optional = true } +imagesize = { version = "1.4.1", optional = true } +requests = { version = "2.31.0", optional = true } +packaging = { version = "23.2", optional = true } +MarkupSafe = { version = "2.1.3", optional = true } +charset_normalizer = { version = "3.3.0", optional = true } +idna = { version = "3.4", optional = true } +urllib3 = { version = "2.0.7", optional = true } +certifi = { version = "2023.7.22", optional = true } + +pydantic = { version = "2.4.2", optional = true } +pydantic-core = { version = "2.10.1", optional = true } +annotated-types = { version = "0.6.0", optional = true } +typing-extensions = { version = "4.8.0", optional = true } + +orjson = { version = "3.9.9", optional = true } + +[tool.poetry.extras] +crypto = ["pycryptodome"] +postgres = ["psycopg2"] +postgres_dev = ["psycopg2-binary"] +docs = [ + "Sphinx", + "docutils", + "sphinx_rtd_theme", + "sphinxcontrib-applehelp", + "sphinxcontrib-devhelp", + "sphinxcontrib-htmlhelp", + "sphinxcontrib-jquery", + "sphinxcontrib-qthelp", + "sphinxcontrib-serializinghtml", + "Jinja2", + "Pygments", + "snowballstemmer", + "alabaster", + "Babel", + "imagesize", + "requests", + "packaging", + "MarkupSafe", + "charset_normalizer", + "idna", + "urllib3", + "certifi", + "pydantic", + "pydantic-core", + "annotated-types", + "typing-extensions", + "orjson", +] +dev = ["dev_requires"] + + + +[tool.poetry.group.dev.dependencies] +black = { version = "*", allow-prereleases = true } +coverage = "^7.2.7" +flake8 = "*" +flake8-broken-line = "*" +flake8-bugbear = "*" +flake8-coding = "*" +flake8-isort = "*" +flake8-tidy-imports = "*" +isort = "*" +mypy = "*" +python-coveralls = "*" + + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 88 +target-version = ["py38"] +include = 
'\.pyi?$' +preview = true +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.pip-cache + | \.poetry + | \.venv + | \.vscode + | _build + | buck-out + | build + | dist +)/ +''' + +[tool.coverage.run] +branch = true +omit = [ + "esdbclient/protos/*" +] + +[tool.coverage.report] +exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", +# # Don't complain about missing debug-only code: +# "def __repr__", +# "if self.debug", +# # Don't complain about some magic methods: +# "def __str__", +# # Don't complain if tests don't hit defensive assertion code: +# "raise AssertionError", +# "raise NotImplementedError", +# # Don't complain if non-runnable code isn't run: +# "if 0:", +# "if __name__ == .__main__.:", +# # Don't complain about empty realizations +# "pass", +# # Don't complain about abstract methods +# "@abc.abstractmethod" +] +#ignore_errors = true +#precision = 2 + +[tool.isort] +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +line_length = 88 +combine_as_imports = true +#;default_section = LOCALFOLDER +#;known_first_party = eventsourcing +#;known_standard_library = dataclasses +#;known_third_party = django +#;not_skip = __init__.py +skip = ".eggs,.pip-cache,.poetry,venv,.venv,dist" +profile = "black" +sections = "FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e3e7aded..00000000 --- a/setup.cfg +++ /dev/null @@ -1,42 +0,0 @@ -[metadata] -description_file = README.md - -[mypy-eventsourcing.tests.*] -ignore_errors = True - -[mypy-eventsourcing.tests.application_tests.test_event_sourced_log] -ignore_errors = False - -[mypy-eventsourcing.dispatch] -ignore_errors = True - -[mypy] -ignore_missing_imports = True -incremental = True -follow_imports = normal -warn_redundant_casts = True -warn_unused_ignores = True -strict_optional = True -no_implicit_optional = True -disallow_untyped_defs = True -disallow_any_generics = True 
- -[isort] -multi_line_output = 3 -include_trailing_comma = True -force_grid_wrap = 0 -use_parentheses = True -line_length = 88 -combine_as_imports = true -;default_section = LOCALFOLDER -;known_first_party = eventsourcing -;known_standard_library = dataclasses -;known_third_party = django -;not_skip = __init__.py -;sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -skip = .eggs,.pip-cache,venv,.venv - -[flake8] -max-line-length = 88 -select = C,E,F,W,B,B950 -ignore = E203, E501, W503, B027 diff --git a/setup.py b/setup.py deleted file mode 100644 index 179d3f3a..00000000 --- a/setup.py +++ /dev/null @@ -1,132 +0,0 @@ -from setuptools import setup - -from eventsourcing import __version__ - -crypto_requires = ["pycryptodome<=3.16.99999"] -postgresql_requires = ["psycopg2<=2.9.99999"] -postgresql_dev_requires = ["psycopg2-binary<=2.9.99999"] - -docs_requires = ( - postgresql_dev_requires - + crypto_requires - + [ - # "Sphinx==7.2.6", - # "sphinx_rtd_theme==2.2.3", - # "sphinxcontrib-applehelp==1.0.7", - # "sphinxcontrib-devhelp==1.0.5", - # "sphinxcontrib-jsmath==1.0.1", - # "sphinxcontrib-htmlhelp==2.0.4", - # "sphinxcontrib_serializinghtml==1.1.9", - # "sphinxcontrib_qthelp==1.0.6", - # "docutils==0.20.1", - # # "sphinxcontrib-serializinghtml==1.1.4", - - "Sphinx==4.2.0", - "docutils==0.17.1", - "sphinx_rtd_theme==1.3.0", - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-jquery==4.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "Jinja2==3.1.2", - "Pygments==2.16.1", - "snowballstemmer==2.2.0", - "alabaster==0.7.13", - "Babel==2.13.0", - "imagesize==1.4.1", - "requests==2.31.0", - "packaging==23.2", - "MarkupSafe==2.1.3", - "charset_normalizer==3.3.0", - "idna==3.4", - "urllib3==2.0.7", - "certifi==2023.7.22", - - "pydantic==2.4.2", - "pydantic-core==2.10.1", - "annotated-types==0.6.0", - "typing-extensions==4.8.0", - - "orjson==3.9.9", - ] - -) - 
-dev_requires = docs_requires + [ - "python-coveralls", - "coverage", - "black", - "mypy", - "flake8", - "flake8-bugbear", - "isort", - 'backports.zoneinfo;python_version<"3.9"', -] - -from pathlib import Path - -this_directory = Path(__file__).parent -readme_text = (this_directory / "README.md").read_text() -parts = readme_text.partition("A library for event sourcing in Python.") -long_description = "".join(parts[1:]) - - -packages = [ - "eventsourcing", - "eventsourcing.tests", -] - - -setup( - name="eventsourcing", - version=__version__, - description="Event sourcing in Python", - author="John Bywater", - author_email="john.bywater@appropriatesoftware.net", - url="https://github.com/pyeventsourcing/eventsourcing", - license="BSD-3-Clause", - packages=packages, - package_data={"eventsourcing": ["py.typed"]}, - python_requires=">=3.7", - install_requires=[ - 'typing_extensions;python_version<"3.8"', - ], - extras_require={ - "postgres": postgresql_requires, - "postgres_dev": postgresql_dev_requires, - "crypto": crypto_requires, - "docs": docs_requires, - "dev": dev_requires, - }, - zip_safe=False, - long_description=long_description, - long_description_content_type="text/markdown", - keywords=[ - "event sourcing", - "event store", - "domain driven design", - "domain-driven design", - "ddd", - "cqrs", - "cqs", - ], - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Intended Audience :: Education", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: BSD License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: Implementation :: CPython", - "Topic :: Software Development :: 
Libraries :: Python Modules", - ], -) From 7e3b65a6642ce32e0ae305192648c1e98d2f021b Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sun, 21 Jan 2024 04:55:57 +0000 Subject: [PATCH 106/107] Reformatted with Black (slightly different settings). --- .flake8 | 2 +- eventsourcing/application.py | 32 ++++----- eventsourcing/domain.py | 6 +- .../examples/cargoshipping/domainmodel.py | 3 +- .../contentmanagementsystem/application.py | 28 ++++---- .../examples/searchablecontent/postgres.py | 8 +-- .../examples/searchablecontent/sqlite.py | 26 +++---- .../examples/searchabletimestamps/postgres.py | 4 +- .../examples/searchabletimestamps/sqlite.py | 4 +- eventsourcing/interface.py | 52 +++++++------- eventsourcing/postgres.py | 68 ++++++++++--------- eventsourcing/sqlite.py | 22 +++--- eventsourcing/system.py | 6 +- eventsourcing/tests/application.py | 24 +++---- eventsourcing/tests/docs_tests/test_docs.py | 7 +- .../tests/domain_tests/test_aggregate.py | 22 +++--- .../test_remotenotificationlog.py | 10 ++- eventsourcing/tests/persistence.py | 22 +++--- .../tests/persistence_tests/test_postgres.py | 10 ++- .../tests/system_tests/test_runner.py | 54 +++++++-------- eventsourcing/tests/utils_tests/test_utils.py | 3 +- eventsourcing/utils.py | 8 +-- 22 files changed, 196 insertions(+), 225 deletions(-) diff --git a/.flake8 b/.flake8 index 28289ed1..3aba7e31 100644 --- a/.flake8 +++ b/.flake8 @@ -2,7 +2,7 @@ exclude = .git,__pycache__,.eggs,*.egg,.pip-cache,.poetry,.venv,dist,*_pb2.py,*_pb2_grpc.py max-line-length = 88 select = C,E,F,W,B,B950 -ignore = C101, E203, E501, W503, B027 +ignore = C101, E203, E501, W503, B027, E704 # ignore = E203,E266,E501,W503,B907,E231 diff --git a/eventsourcing/application.py b/eventsourcing/application.py index 09089ce1..18ce6739 100644 --- a/eventsourcing/application.py +++ b/eventsourcing/application.py @@ -637,9 +637,9 @@ class Application: name = "Application" env: EnvType = {} is_snapshotting_enabled: bool = False - snapshotting_intervals: 
Optional[ - Dict[Type[MutableOrImmutableAggregate], int] - ] = None + snapshotting_intervals: Optional[Dict[Type[MutableOrImmutableAggregate], int]] = ( + None + ) snapshotting_projectors: Optional[ Dict[Type[MutableOrImmutableAggregate], ProjectorFunction[Any, Any]] ] = None @@ -677,9 +677,9 @@ def __init__(self, env: Optional[EnvType] = None) -> None: self._repository = self.construct_repository() self._notification_log = self.construct_notification_log() self.closing = Event() - self.previous_max_notification_id: Optional[ - int - ] = self.recorder.max_notification_id() + self.previous_max_notification_id: Optional[int] = ( + self.recorder.max_notification_id() + ) @property def repository(self) -> Repository: @@ -863,17 +863,15 @@ def _take_snapshots(self, processing_event: ProcessingEvent) -> None: event, CanMutateProtocol ): raise ProgrammingError( - ( - f"Cannot take snapshot for {type(aggregate)} with " - "default project_aggregate() function, because its " - f"domain event {type(event)} does not implement " - "the 'can mutate' protocol (see CanMutateProtocol)." - f" Please define application class {type(self)}" - " with class variable 'snapshotting_projectors', " - f"to be a dict that has {type(aggregate)} as a key " - "with the aggregate projector function for " - f"{type(aggregate)} as the value for that key." - ) + f"Cannot take snapshot for {type(aggregate)} with " + "default project_aggregate() function, because its " + f"domain event {type(event)} does not implement " + "the 'can mutate' protocol (see CanMutateProtocol)." + f" Please define application class {type(self)}" + " with class variable 'snapshotting_projectors', " + f"to be a dict that has {type(aggregate)} as a key " + "with the aggregate projector function for " + f"{type(aggregate)} as the value for that key." 
) self.take_snapshot( aggregate_id=event.originator_id, diff --git a/eventsourcing/domain.py b/eventsourcing/domain.py index beb31cfb..7bd3321f 100644 --- a/eventsourcing/domain.py +++ b/eventsourcing/domain.py @@ -801,7 +801,7 @@ def _spec_coerce_args_to_kwargs( for name in kwargs_keys: if name not in required_keyword_only and name not in positional_names: raise TypeError( - f"{method_name}() got an unexpected " f"keyword argument '{name}'" + f"{method_name}() got an unexpected keyword argument '{name}'" ) if len_args > len(positional_names): msg = ( @@ -843,7 +843,7 @@ def _spec_coerce_args_to_kwargs( missing_names = [f"'{name}'" for name in missing_keyword_only_arguments] msg = ( f"{method_name}() missing {len(missing_names)} " - f"required keyword-only argument" + "required keyword-only argument" f"{'' if len(missing_names) == 1 else 's'}: " ) _raise_missing_names_type_error(missing_names, msg) @@ -1130,7 +1130,7 @@ def __init__( method_name = event_decorator.decorated_method.__name__ raise TypeError( f"@event under {method_name}() property setter requires " - f"event class name" + "event class name" ) if event_decorator is not None: diff --git a/eventsourcing/examples/cargoshipping/domainmodel.py b/eventsourcing/examples/cargoshipping/domainmodel.py index b41987ed..dcdbd618 100644 --- a/eventsourcing/examples/cargoshipping/domainmodel.py +++ b/eventsourcing/examples/cargoshipping/domainmodel.py @@ -289,8 +289,7 @@ def _(self, event: Cargo.HandlingEventRegistered) -> None: break else: raise Exception( - "Can't find leg with origin={} and " - "voyage_number={}".format( + "Can't find leg with origin={} and voyage_number={}".format( event.location, event.voyage_number, ) diff --git a/eventsourcing/examples/contentmanagementsystem/application.py b/eventsourcing/examples/contentmanagementsystem/application.py index ece94573..17ca8ae5 100644 --- a/eventsourcing/examples/contentmanagementsystem/application.py +++ 
b/eventsourcing/examples/contentmanagementsystem/application.py @@ -24,27 +24,23 @@ def policy( processing_event: ProcessingEvent, ) -> None: if isinstance(domain_event, Page.Created): - processing_event.saved_kwargs["insert_pages"] = [ - ( - domain_event.originator_id, - domain_event.slug, - domain_event.title, - domain_event.body, - ) - ] + processing_event.saved_kwargs["insert_pages"] = [( + domain_event.originator_id, + domain_event.slug, + domain_event.title, + domain_event.body, + )] elif isinstance(domain_event, Page.BodyUpdated): recorder = cast(SearchableContentRecorder, self.recorder) page_id = domain_event.originator_id page_slug, page_title, page_body = recorder.select_page(page_id) page_body = apply_patch(page_body, domain_event.diff) - processing_event.saved_kwargs["update_pages"] = [ - ( - page_id, - page_slug, - page_title, - page_body, - ) - ] + processing_event.saved_kwargs["update_pages"] = [( + page_id, + page_slug, + page_title, + page_body, + )] def search(self, query: str) -> List[UUID]: recorder = cast(SearchableContentRecorder, self.recorder) diff --git a/eventsourcing/examples/searchablecontent/postgres.py b/eventsourcing/examples/searchablecontent/postgres.py index ffcc3900..18050454 100644 --- a/eventsourcing/examples/searchablecontent/postgres.py +++ b/eventsourcing/examples/searchablecontent/postgres.py @@ -24,7 +24,7 @@ class PostgresSearchableContentRecorder( pages_table_name = "pages_projection_example" select_page_statement = ( f"SELECT page_slug, page_title, page_body FROM {pages_table_name}" - f" WHERE page_id = $1" + " WHERE page_id = $1" ) select_page_statement_name = f"select_{pages_table_name}".replace(".", "_") @@ -34,13 +34,13 @@ class PostgresSearchableContentRecorder( update_page_statement = ( f"UPDATE {pages_table_name} " - f"SET page_slug = $1, page_title = $2, page_body = $3 WHERE page_id = $4" + "SET page_slug = $1, page_title = $2, page_body = $3 WHERE page_id = $4" ) update_page_statement_name = 
f"update_{pages_table_name}".replace(".", "_") search_pages_statement = ( f"SELECT page_id FROM {pages_table_name} WHERE " - f"to_tsvector('english', page_body) @@ websearch_to_tsquery('english', $1)" + "to_tsvector('english', page_body) @@ websearch_to_tsquery('english', $1)" ) search_pages_statement_name = f"search_{pages_table_name}".replace(".", "_") @@ -59,7 +59,7 @@ def construct_create_table_statements(self) -> List[str]: statements.append( f"CREATE INDEX IF NOT EXISTS {self.pages_table_name}_idx " f"ON {self.pages_table_name} " - f"USING GIN (to_tsvector('english', page_body))" + "USING GIN (to_tsvector('english', page_body))" ) return statements diff --git a/eventsourcing/examples/searchablecontent/sqlite.py b/eventsourcing/examples/searchablecontent/sqlite.py index 4d3181b0..480f085d 100644 --- a/eventsourcing/examples/searchablecontent/sqlite.py +++ b/eventsourcing/examples/searchablecontent/sqlite.py @@ -22,16 +22,16 @@ class SQLiteSearchableContentRecorder( pages_table_name = "pages_projection_example" pages_virtual_table_name = pages_table_name + "_fts" select_page_statement = ( - f"SELECT page_slug, page_title, page_body FROM " + "SELECT page_slug, page_title, page_body FROM " f"{pages_table_name} WHERE page_id = ?" ) insert_page_statement = f"INSERT INTO {pages_table_name} VALUES (?, ?, ?, ?)" update_page_statement = ( f"UPDATE {pages_table_name} " - f"SET page_slug = ?, page_title = ?, page_body = ? WHERE page_id = ?" + "SET page_slug = ?, page_title = ?, page_body = ? WHERE page_id = ?" ) search_pages_statement = ( - f"SELECT page_id FROM {pages_virtual_table_name} WHERE " f"page_body MATCH ?" + f"SELECT page_id FROM {pages_virtual_table_name} WHERE page_body MATCH ?" 
) def construct_create_table_statements(self) -> List[str]: @@ -51,24 +51,24 @@ def construct_create_table_statements(self) -> List[str]: f"page_id, page_body, content='{self.pages_table_name}')" ) statements.append( - f"CREATE TRIGGER projection_ai AFTER INSERT ON " + "CREATE TRIGGER projection_ai AFTER INSERT ON " f"{self.pages_table_name} BEGIN " f"INSERT INTO {self.pages_virtual_table_name} " - f"(rowid, page_id, page_body) " - f"VALUES (new.rowid, new.page_id, new.page_body); " - f"END" + "(rowid, page_id, page_body) " + "VALUES (new.rowid, new.page_id, new.page_body); " + "END" ) statements.append( - f"CREATE TRIGGER projection_au AFTER UPDATE ON " + "CREATE TRIGGER projection_au AFTER UPDATE ON " f"{self.pages_table_name} " - f"BEGIN " + "BEGIN " f"INSERT INTO {self.pages_virtual_table_name} " f"({self.pages_virtual_table_name}, rowid, page_id, page_body) " - f"VALUES ('delete', old.rowid, old.page_id, old.page_body);" + "VALUES ('delete', old.rowid, old.page_id, old.page_body);" f"INSERT INTO {self.pages_virtual_table_name} " - f"(rowid, page_id, page_body) " - f"VALUES (new.rowid, new.page_id, new.page_body); " - f"END" + "(rowid, page_id, page_body) " + "VALUES (new.rowid, new.page_id, new.page_body); " + "END" ) return statements diff --git a/eventsourcing/examples/searchabletimestamps/postgres.py b/eventsourcing/examples/searchabletimestamps/postgres.py index 28d2451b..794d12e0 100644 --- a/eventsourcing/examples/searchabletimestamps/postgres.py +++ b/eventsourcing/examples/searchabletimestamps/postgres.py @@ -36,8 +36,8 @@ def __init__( ) self.select_event_timestamp_statement = ( f"SELECT originator_version FROM {self.event_timestamps_table_name} WHERE " - f"originator_id = $1 AND " - f"timestamp <= $2 " + "originator_id = $1 AND " + "timestamp <= $2 " "ORDER BY originator_version DESC " "LIMIT 1" ) diff --git a/eventsourcing/examples/searchabletimestamps/sqlite.py b/eventsourcing/examples/searchabletimestamps/sqlite.py index 5442b785..c397c073 100644 
--- a/eventsourcing/examples/searchabletimestamps/sqlite.py +++ b/eventsourcing/examples/searchabletimestamps/sqlite.py @@ -31,8 +31,8 @@ def __init__( ) self.select_event_timestamp_statement = ( f"SELECT originator_version FROM {self.event_timestamps_table_name} WHERE " - f"originator_id = ? AND " - f"timestamp <= ? " + "originator_id = ? AND " + "timestamp <= ? " "ORDER BY originator_version DESC " "LIMIT 1" ) diff --git a/eventsourcing/interface.py b/eventsourcing/interface.py index 77c3289c..cff2ca3f 100644 --- a/eventsourcing/interface.py +++ b/eventsourcing/interface.py @@ -50,22 +50,20 @@ def get_log_section(self, section_id: str) -> str: from a notification log. """ section = self.app.notification_log[section_id] - return json.dumps( - { - "id": section.id, - "next_id": section.next_id, - "items": [ - { - "id": item.id, - "originator_id": item.originator_id.hex, - "originator_version": item.originator_version, - "topic": item.topic, - "state": b64encode(item.state).decode("utf8"), - } - for item in section.items - ], - } - ) + return json.dumps({ + "id": section.id, + "next_id": section.next_id, + "items": [ + { + "id": item.id, + "originator_id": item.originator_id.hex, + "originator_version": item.originator_version, + "topic": item.topic, + "state": b64encode(item.state).decode("utf8"), + } + for item in section.items + ], + }) def get_notifications( self, start: int, limit: int, topics: Sequence[str] = () @@ -73,18 +71,16 @@ def get_notifications( notifications = self.app.notification_log.select( start=start, limit=limit, topics=topics ) - return json.dumps( - [ - { - "id": notification.id, - "originator_id": notification.originator_id.hex, - "originator_version": notification.originator_version, - "topic": notification.topic, - "state": b64encode(notification.state).decode("utf8"), - } - for notification in notifications - ] - ) + return json.dumps([ + { + "id": notification.id, + "originator_id": notification.originator_id.hex, + "originator_version": 
notification.originator_version, + "topic": notification.topic, + "state": b64encode(notification.state).decode("utf8"), + } + for notification in notifications + ]) class NotificationLogJSONClient(NotificationLog): diff --git a/eventsourcing/postgres.py b/eventsourcing/postgres.py index d5965914..e281906d 100644 --- a/eventsourcing/postgres.py +++ b/eventsourcing/postgres.py @@ -158,7 +158,7 @@ def _create_connection(self) -> PostgresConnection: except psycopg2.OperationalError as e: raise OperationalError(e) from e pg_conn.cursor().execute( - f"SET idle_in_transaction_session_timeout = " + "SET idle_in_transaction_session_timeout = " f"'{self.idle_in_transaction_session_timeout}s'" ) return PostgresConnection(pg_conn, max_age=self.max_age) @@ -537,7 +537,7 @@ def __init__( super().__init__(datastore, events_table_name) self.insert_events_statement = ( f"INSERT INTO {self.events_table_name} VALUES ($1, $2, $3, $4) " - f"RETURNING notification_id" + "RETURNING notification_id" ) self.max_notification_id_statement = ( f"SELECT MAX(notification_id) FROM {self.events_table_name}" @@ -552,19 +552,23 @@ def __init__( def construct_create_table_statements(self) -> List[str]: statements = [ - "CREATE TABLE IF NOT EXISTS " - f"{self.events_table_name} (" - "originator_id uuid NOT NULL, " - "originator_version bigint NOT NULL, " - "topic text, " - "state bytea, " - "notification_id bigserial, " - "PRIMARY KEY " - "(originator_id, originator_version)) " - "WITH (autovacuum_enabled=false)", - f"CREATE UNIQUE INDEX IF NOT EXISTS " - f"{self.notification_id_index_name}" - f"ON {self.events_table_name} (notification_id ASC);", + ( + "CREATE TABLE IF NOT EXISTS " + f"{self.events_table_name} (" + "originator_id uuid NOT NULL, " + "originator_version bigint NOT NULL, " + "topic text, " + "state bytea, " + "notification_id bigserial, " + "PRIMARY KEY " + "(originator_id, originator_version)) " + "WITH (autovacuum_enabled=false)" + ), + ( + "CREATE UNIQUE INDEX IF NOT EXISTS " + 
f"{self.notification_id_index_name}" + f"ON {self.events_table_name} (notification_id ASC);" + ), ] return statements @@ -582,9 +586,7 @@ def select_notifications( """ params: List[Union[int, str, Sequence[str]]] = [start] - statement = ( - "SELECT * " f"FROM {self.events_table_name} " "WHERE notification_id>=$1 " - ) + statement = f"SELECT * FROM {self.events_table_name} WHERE notification_id>=$1 " statement_name = f"select_notifications_{self.events_table_name}".replace( ".", "_" ) @@ -600,7 +602,7 @@ def select_notifications( statement_name += "_topics" params.append(limit) - statement += "ORDER BY notification_id " f"LIMIT ${len(params)}" + statement += f"ORDER BY notification_id LIMIT ${len(params)}" notifications = [] with self.datastore.get_connection() as conn: @@ -838,9 +840,9 @@ def __init__(self, env: Environment): connect_timeout = int(connect_timeout_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_CONNECT_TIMEOUT}' is invalid. " - f"If set, an integer or empty string is expected: " + "If set, an integer or empty string is expected: " f"'{connect_timeout_str}'" ) @@ -854,9 +856,9 @@ def __init__(self, env: Environment): ) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT}' is invalid. " - f"If set, an integer or empty string is expected: " + "If set, an integer or empty string is expected: " f"'{idle_in_transaction_session_timeout_str}'" ) @@ -871,9 +873,9 @@ def __init__(self, env: Environment): pool_size = int(pool_size_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_POOL_SIZE}' is invalid. 
" - f"If set, an integer or empty string is expected: " + "If set, an integer or empty string is expected: " f"'{pool_size_str}'" ) @@ -888,9 +890,9 @@ def __init__(self, env: Environment): pool_max_overflow = int(pool_max_overflow_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_POOL_MAX_OVERFLOW}' is invalid. " - f"If set, an integer or empty string is expected: " + "If set, an integer or empty string is expected: " f"'{pool_max_overflow_str}'" ) @@ -905,9 +907,9 @@ def __init__(self, env: Environment): pool_timeout = float(pool_timeout_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_POOL_TIMEOUT}' is invalid. " - f"If set, a float or empty string is expected: " + "If set, a float or empty string is expected: " f"'{pool_timeout_str}'" ) @@ -922,9 +924,9 @@ def __init__(self, env: Environment): conn_max_age = float(conn_max_age_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_CONN_MAX_AGE}' is invalid. " - f"If set, a float or empty string is expected: " + "If set, a float or empty string is expected: " f"'{conn_max_age_str}'" ) @@ -936,9 +938,9 @@ def __init__(self, env: Environment): lock_timeout = int(lock_timeout_str) except ValueError: raise EnvironmentError( - f"Postgres environment value for key " + "Postgres environment value for key " f"'{self.POSTGRES_LOCK_TIMEOUT}' is invalid. 
" - f"If set, an integer or empty string is expected: " + "If set, an integer or empty string is expected: " f"'{lock_timeout_str}'" ) diff --git a/eventsourcing/sqlite.py b/eventsourcing/sqlite.py index 7ff5b74a..73f40d87 100644 --- a/eventsourcing/sqlite.py +++ b/eventsourcing/sqlite.py @@ -256,7 +256,7 @@ def __init__( f"INSERT INTO {self.events_table_name} VALUES (?,?,?,?)" ) self.select_events_statement = ( - "SELECT * " f"FROM {self.events_table_name} " "WHERE originator_id=? " + f"SELECT * FROM {self.events_table_name} WHERE originator_id=? " ) def construct_create_table_statements(self) -> List[str]: @@ -293,14 +293,12 @@ def _insert_events( ) -> Optional[Sequence[int]]: params = [] for stored_event in stored_events: - params.append( - ( - stored_event.originator_id.hex, - stored_event.originator_version, - stored_event.topic, - stored_event.state, - ) - ) + params.append(( + stored_event.originator_id.hex, + stored_event.originator_version, + stored_event.topic, + stored_event.state, + )) c.executemany(self.insert_events_statement, params) return None @@ -405,7 +403,7 @@ def select_notifications( notifications = [] params: List[Union[int, str]] = [start] - statement = f"SELECT rowid, * FROM {self.events_table_name} " "WHERE rowid>=? " + statement = f"SELECT rowid, * FROM {self.events_table_name} WHERE rowid>=? " if stop is not None: params.append(stop) @@ -538,9 +536,9 @@ def __init__(self, env: Environment): lock_timeout = int(lock_timeout_str) except ValueError: raise EnvironmentError( - f"SQLite environment value for key " + "SQLite environment value for key " f"'{self.SQLITE_LOCK_TIMEOUT}' is invalid. 
" - f"If set, an int or empty string is expected: " + "If set, an int or empty string is expected: " f"'{lock_timeout_str}'" ) diff --git a/eventsourcing/system.py b/eventsourcing/system.py index b8c6bbb6..bb2b99a4 100644 --- a/eventsourcing/system.py +++ b/eventsourcing/system.py @@ -665,9 +665,9 @@ def receive_recording_event(self, recording_event: RecordingEvent) -> None: ), ) - self._previous_max_notification_ids[ - leader_name - ] = recording_event.recordings[-1].notification.id + self._previous_max_notification_ids[leader_name] = ( + recording_event.recordings[-1].notification.id + ) finally: self._processing_lock.release() diff --git a/eventsourcing/tests/application.py b/eventsourcing/tests/application.py index 89667612..e761c146 100644 --- a/eventsourcing/tests/application.py +++ b/eventsourcing/tests/application.py @@ -270,10 +270,8 @@ def test_resolve_persistence_topics(self): Application(env={"PERSISTENCE_MODULE": "eventsourcing.application"}) self.assertEqual( cm.exception.args[0], - ( - "Found 0 infrastructure factory classes in " - "'eventsourcing.application', expected 1." - ), + "Found 0 infrastructure factory classes in " + "'eventsourcing.application', expected 1.", ) with self.assertRaises(AssertionError) as cm: @@ -282,10 +280,8 @@ def test_resolve_persistence_topics(self): ) self.assertEqual( cm.exception.args[0], - ( - "Not an infrastructure factory class or module: " - "eventsourcing.application:Application" - ), + "Not an infrastructure factory class or module: " + "eventsourcing.application:Application", ) def test_save_returns_recording_event(self): @@ -318,13 +314,11 @@ def test_take_snapshot_raises_assertion_error_if_snapshotting_not_enabled(self): app.take_snapshot(uuid4()) self.assertEqual( cm.exception.args[0], - ( - "Can't take snapshot without snapshots store. Please " - "set environment variable IS_SNAPSHOTTING_ENABLED to " - "a true value (e.g. 
'y'), or set 'is_snapshotting_enabled' " - "on application class, or set 'snapshotting_intervals' on " - "application class." - ), + "Can't take snapshot without snapshots store. Please " + "set environment variable IS_SNAPSHOTTING_ENABLED to " + "a true value (e.g. 'y'), or set 'is_snapshotting_enabled' " + "on application class, or set 'snapshotting_intervals' on " + "application class.", ) def test_application_with_cached_aggregates_and_fastforward(self): diff --git a/eventsourcing/tests/docs_tests/test_docs.py b/eventsourcing/tests/docs_tests/test_docs.py index deaf5c20..51832fff 100644 --- a/eventsourcing/tests/docs_tests/test_docs.py +++ b/eventsourcing/tests/docs_tests/test_docs.py @@ -271,7 +271,12 @@ def check_code_snippets_in_file(self, doc_path): tempfile.flush() # Run the code and catch errors. - p = Popen([sys.executable, temp_path], stdout=PIPE, stderr=PIPE, env={"PYTHONPATH": base_dir}) + p = Popen( + [sys.executable, temp_path], + stdout=PIPE, + stderr=PIPE, + env={"PYTHONPATH": base_dir}, + ) out, err = p.communicate() out = out.decode("utf8") err = err.decode("utf8") diff --git a/eventsourcing/tests/domain_tests/test_aggregate.py b/eventsourcing/tests/domain_tests/test_aggregate.py index c55198bf..d9f2c15a 100644 --- a/eventsourcing/tests/domain_tests/test_aggregate.py +++ b/eventsourcing/tests/domain_tests/test_aggregate.py @@ -133,10 +133,8 @@ def create(cls, name): method_name = get_method_name(BrokenAggregate.Created.__init__) self.assertEqual( - ( - f"Unable to construct 'Created' event: " - f"{method_name}() got an unexpected keyword argument 'name'" - ), + "Unable to construct 'Created' event: " + f"{method_name}() got an unexpected keyword argument 'name'", cm.exception.args[0], ) @@ -352,7 +350,7 @@ def __init__(self, *, value): self.assertEqual( cm.exception.args[0], f"{get_method_name(MyAgg.__init__)}() missing 1 required " - f"keyword-only argument: 'value'", + "keyword-only argument: 'value'", ) def 
test_raises_when_init_missing_required_positional_and_keyword_only_arg(self): @@ -791,7 +789,7 @@ def assert_id_dataclass_style(cls): self.assertEqual( cm.exception.args[0], f"{get_method_name(cls.__init__)}() missing 2 " - f"required positional arguments: 'id' and 'name'", + "required positional arguments: 'id' and 'name'", ) # Just check it works if used properly. @@ -994,7 +992,7 @@ def apply(self, aggregate: TAggregate) -> None: "version=1, " f"created_on={a.created_on!r}, " f"modified_on={a.modified_on!r}, " - f"a=1" + "a=1" ")" ) self.assertEqual(expect, repr(a)) @@ -1006,8 +1004,8 @@ def apply(self, aggregate: TAggregate) -> None: "version=2, " f"created_on={a.created_on!r}, " f"modified_on={a.modified_on!r}, " - f"a=1, " - f"b=2" + "a=1, " + "b=2" ")" ) self.assertEqual(expect, repr(a)) @@ -1028,7 +1026,7 @@ def apply(self, aggregate: TAggregate) -> None: "version=1, " f"created_on={a.created_on!r}, " f"modified_on={a.modified_on!r}, " - f"a=1" + "a=1" ")" ) self.assertEqual(expect, repr(a)) @@ -1040,8 +1038,8 @@ def apply(self, aggregate: TAggregate) -> None: "version=2, " f"created_on={a.created_on!r}, " f"modified_on={a.modified_on!r}, " - f"a=1, " - f"b=2" + "a=1, " + "b=2" ")" ) self.assertEqual(expect, repr(a)) diff --git a/eventsourcing/tests/interface_tests/test_remotenotificationlog.py b/eventsourcing/tests/interface_tests/test_remotenotificationlog.py index 4aee7fc5..81cb2294 100644 --- a/eventsourcing/tests/interface_tests/test_remotenotificationlog.py +++ b/eventsourcing/tests/interface_tests/test_remotenotificationlog.py @@ -152,12 +152,10 @@ def __init__(self, interface: BankAccountsInterface): self.log = NotificationLogJSONClient(interface) def open_account(self, full_name, email_address) -> UUID: - body = json.dumps( - { - "full_name": full_name, - "email_address": email_address, - } - ) + body = json.dumps({ + "full_name": full_name, + "email_address": email_address, + }) body = self.interface.open_account(body) return 
UUID(json.loads(body)["account_id"]) diff --git a/eventsourcing/tests/persistence.py b/eventsourcing/tests/persistence.py index 1e4c07fd..32cd2abf 100644 --- a/eventsourcing/tests/persistence.py +++ b/eventsourcing/tests/persistence.py @@ -1288,11 +1288,9 @@ def test_custom_type_error(self): self.assertEqual( cm.exception.args[0], - ( - "Object of type is not serializable. Please define " - "and register a custom transcoding for this type." - ), + "Object of type is not serializable. Please define " + "and register a custom transcoding for this type.", ) # Expect a TypeError when encoding because transcoding not registered (nested). @@ -1301,11 +1299,9 @@ def test_custom_type_error(self): self.assertEqual( cm.exception.args[0], - ( - "Object of type is not serializable. Please define " - "and register a custom transcoding for this type." - ), + "Object of type is not serializable. Please define " + "and register a custom transcoding for this type.", ) # Check we get a TypeError when decoding because transcodings aren't registered. @@ -1316,8 +1312,6 @@ def test_custom_type_error(self): self.assertEqual( cm.exception.args[0], - ( - "Data serialized with name 'custom_type3_as_dict' is not " - "deserializable. Please register a custom transcoding for this type." - ), + "Data serialized with name 'custom_type3_as_dict' is not " + "deserializable. 
Please register a custom transcoding for this type.", ) diff --git a/eventsourcing/tests/persistence_tests/test_postgres.py b/eventsourcing/tests/persistence_tests/test_postgres.py index 62ceab48..a7213a78 100644 --- a/eventsourcing/tests/persistence_tests/test_postgres.py +++ b/eventsourcing/tests/persistence_tests/test_postgres.py @@ -498,11 +498,9 @@ def test_report_on_prepared_statements(self): self.assertEqual(pg[0][0], select_alias) self.assertEqual( pg[0][1], - ( - f"PREPARE {select_alias} AS SELECT * FROM " - f"{qualified_table_name} WHERE originator_id = $1 ORDER " - "BY originator_version ASC" - ), + f"PREPARE {select_alias} AS SELECT * FROM " + f"{qualified_table_name} WHERE originator_id = $1 ORDER " + "BY originator_version ASC", ) self.assertEqual(pg[0][3], "{uuid}") self.assertEqual(pg[0][4], True) @@ -569,7 +567,7 @@ def test_retry_insert_events_after_deallocating_prepared_statement(self): statement_name = recorder.insert_events_statement_name self.assertIn(statement_name, conn.is_prepared) conn.cursor().execute( - f"DEALLOCATE " f"{recorder.statement_name_aliases[statement_name]}" + f"DEALLOCATE {recorder.statement_name_aliases[statement_name]}" ) # Write a stored event. 
diff --git a/eventsourcing/tests/system_tests/test_runner.py b/eventsourcing/tests/system_tests/test_runner.py index 539799ba..b24a512b 100644 --- a/eventsourcing/tests/system_tests/test_runner.py +++ b/eventsourcing/tests/system_tests/test_runner.py @@ -400,14 +400,10 @@ def test_ignores_recording_event_if_seen_subsequent(self): def test_received_notifications_accumulate(self): self.start_runner( - System( - [ - [ - BankAccounts, - EmailProcess, - ] - ] - ) + System([[ + BankAccounts, + EmailProcess, + ]]) ) accounts = self.runner.get(BankAccounts) @@ -675,27 +671,27 @@ class TestMultiThreadedRunnerWithSQLiteInMemory(TestMultiThreadedRunner): def setUp(self): super().setUp() os.environ["PERSISTENCE_MODULE"] = "eventsourcing.sqlite" - os.environ[ - f"{BankAccounts.name.upper()}_SQLITE_DBNAME" - ] = f"file:{BankAccounts.name.lower()}?mode=memory&cache=shared" - os.environ[ - f"{EmailProcess.name.upper()}_SQLITE_DBNAME" - ] = f"file:{EmailProcess.name.lower()}?mode=memory&cache=shared" - os.environ[ - f"MY{EmailProcess.name.upper()}_SQLITE_DBNAME" - ] = f"file:{EmailProcess.name.lower()}?mode=memory&cache=shared" - os.environ[ - f"{EmailProcess.name.upper()}2_SQLITE_DBNAME" - ] = f"file:{EmailProcess.name.lower()}2?mode=memory&cache=shared" - os.environ[ - "BROKENPROCESSING_SQLITE_DBNAME" - ] = "file:brokenprocessing?mode=memory&cache=shared" - os.environ[ - "BROKENCONVERTING_SQLITE_DBNAME" - ] = "file:brokenconverting?mode=memory&cache=shared" - os.environ[ - "BROKENPULLING_SQLITE_DBNAME" - ] = "file:brokenprocessing?mode=memory&cache=shared" + os.environ[f"{BankAccounts.name.upper()}_SQLITE_DBNAME"] = ( + f"file:{BankAccounts.name.lower()}?mode=memory&cache=shared" + ) + os.environ[f"{EmailProcess.name.upper()}_SQLITE_DBNAME"] = ( + f"file:{EmailProcess.name.lower()}?mode=memory&cache=shared" + ) + os.environ[f"MY{EmailProcess.name.upper()}_SQLITE_DBNAME"] = ( + f"file:{EmailProcess.name.lower()}?mode=memory&cache=shared" + ) + 
os.environ[f"{EmailProcess.name.upper()}2_SQLITE_DBNAME"] = ( + f"file:{EmailProcess.name.lower()}2?mode=memory&cache=shared" + ) + os.environ["BROKENPROCESSING_SQLITE_DBNAME"] = ( + "file:brokenprocessing?mode=memory&cache=shared" + ) + os.environ["BROKENCONVERTING_SQLITE_DBNAME"] = ( + "file:brokenconverting?mode=memory&cache=shared" + ) + os.environ["BROKENPULLING_SQLITE_DBNAME"] = ( + "file:brokenprocessing?mode=memory&cache=shared" + ) os.environ["COMMANDS_SQLITE_DBNAME"] = "file:commands?mode=memory&cache=shared" os.environ["RESULTS_SQLITE_DBNAME"] = "file:results?mode=memory&cache=shared" diff --git a/eventsourcing/tests/utils_tests/test_utils.py b/eventsourcing/tests/utils_tests/test_utils.py index 1b03f376..4837654c 100644 --- a/eventsourcing/tests/utils_tests/test_utils.py +++ b/eventsourcing/tests/utils_tests/test_utils.py @@ -191,8 +191,7 @@ def test_topic_errors(self): with self.assertRaises(TopicError) as cm: resolve_topic("oldmodule:Aggregate") expected_msg = ( - "Failed to resolve topic 'oldmodule:Aggregate': " - "No module named 'oldmodule'" + "Failed to resolve topic 'oldmodule:Aggregate': No module named 'oldmodule'" ) self.assertEqual(expected_msg, cm.exception.args[0]) diff --git a/eventsourcing/utils.py b/eventsourcing/utils.py index aacddb22..1cd9adae 100644 --- a/eventsourcing/utils.py +++ b/eventsourcing/utils.py @@ -242,12 +242,12 @@ def __init__(self, name: str = "", env: Optional[EnvType] = None): self.name = name @overload - def get(self, key: str) -> Optional[str]: - ... # pragma: no cover + def get(self, key: str) -> Optional[str]: ... # pragma: no cover @overload - def get(self, key: str, default: Union[str, T]) -> Union[str, T]: - ... # pragma: no cover + def get( + self, key: str, default: Union[str, T] + ) -> Union[str, T]: ... 
# pragma: no cover def get( self, key: str, default: Optional[Union[str, T]] = None From e6b25bcff4b777c2229906a88b56a61a2caa5b2f Mon Sep 17 00:00:00 2001 From: johnbywater Date: Sun, 21 Jan 2024 04:59:11 +0000 Subject: [PATCH 107/107] Dropped Python 3.7 from build matrix. --- .github/workflows/runtests.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/runtests.yaml b/.github/workflows/runtests.yaml index c974d2f7..df84b04b 100644 --- a/.github/workflows/runtests.yaml +++ b/.github/workflows/runtests.yaml @@ -10,7 +10,7 @@ jobs: max-parallel: 20 fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] postgres-version: ["12", "13", "14", "15"] # Service containers to run with `runner-job` @@ -50,7 +50,6 @@ jobs: - name: Lint run: make lint - if: matrix.python-version != '3.7' - name: Test run: make test @@ -69,7 +68,7 @@ jobs: max-parallel: 5 fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v2