From fb8da82a0d0f8d8dd93d7c3d2d9d075489c3b0a5 Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 12 Aug 2024 16:20:53 +0530 Subject: [PATCH 01/27] Checks for correct signature used by the implementation class. --- xcov19/app/services.py | 20 +++++++++++++++----- xcov19/tests/conftest.py | 15 ++++++++++++--- xcov19/utils/__init__.py | 0 xcov19/utils/mixins.py | 27 +++++++++++++++++++++++++++ 4 files changed, 54 insertions(+), 8 deletions(-) create mode 100644 xcov19/utils/__init__.py create mode 100644 xcov19/utils/mixins.py diff --git a/xcov19/app/services.py b/xcov19/app/services.py index 3e60d4c..75cfecf 100644 --- a/xcov19/app/services.py +++ b/xcov19/app/services.py @@ -9,19 +9,21 @@ """ import abc +from collections.abc import Callable from typing import Protocol, Tuple from rodi import Container from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation from xcov19.app.settings import Settings +from xcov19.utils.mixins import InterfaceProtocolCheckMixin def configure_services(settings: Settings) -> Tuple[Container, Settings]: container = Container() container.add_instance(settings) - container.add_scoped(LocationQueryServiceInterface, GeolocationQueryServiceImpl) + container.add_scoped(LocationQueryServiceInterface, GeolocationQueryService) return container, settings @@ -39,7 +41,9 @@ class LocationQueryServiceInterface[T: LocationQueryJSON](Protocol): @classmethod @abc.abstractmethod - async def resolve_coordinates(cls, query: T) -> Address: + async def resolve_coordinates( + cls, reverse_geo_lookup_svc: Callable[[T], dict], query: T + ) -> Address: raise NotImplementedError @classmethod @@ -49,10 +53,16 @@ async def fetch_facilities(cls, query: T) -> FacilitiesResult: # TODO: make hard-coded response functional -class GeolocationQueryServiceImpl(LocationQueryServiceInterface): +class GeolocationQueryService( + LocationQueryServiceInterface, InterfaceProtocolCheckMixin +): @classmethod - async def resolve_coordinates(cls, query: 
LocationQueryJSON) -> Address: - return Address() + async def resolve_coordinates( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + query: LocationQueryJSON, + ) -> Address: + return Address(**reverse_geo_lookup_svc(query)) @classmethod async def fetch_facilities(cls, query: LocationQueryJSON) -> FacilitiesResult: diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index 8a6bcd7..e4978cf 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -1,3 +1,5 @@ +from collections.abc import Callable + import pytest from xcov19.app.dto import ( @@ -9,6 +11,7 @@ FacilitiesResult, ) from xcov19.app.services import LocationQueryServiceInterface +from xcov19.utils.mixins import InterfaceProtocolCheckMixin # Same as using @pytest.mark.anyio pytestmark = pytest.mark.anyio @@ -43,10 +46,16 @@ def stub_location_srvc(): return StubLocationQueryServiceImpl -class StubLocationQueryServiceImpl(LocationQueryServiceInterface): +class StubLocationQueryServiceImpl( + LocationQueryServiceInterface, InterfaceProtocolCheckMixin +): @classmethod - async def resolve_coordinates(cls, query: LocationQueryJSON) -> Address: - return Address() + async def resolve_coordinates( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + query: LocationQueryJSON, + ) -> Address: + return Address(**reverse_geo_lookup_svc(query)) @classmethod async def fetch_facilities(cls, query: LocationQueryJSON) -> FacilitiesResult: diff --git a/xcov19/utils/__init__.py b/xcov19/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/xcov19/utils/mixins.py b/xcov19/utils/mixins.py new file mode 100644 index 0000000..1284031 --- /dev/null +++ b/xcov19/utils/mixins.py @@ -0,0 +1,27 @@ +import inspect + + +class InterfaceProtocolCheckMixin: + """Checks for correct signature used by the implementation class. + + Drop in mixin wherever an implementation is subclasses with an + interface definition. 
+ """ + + def __init_subclass__(cls, **kwargs): + parent_class = inspect.getmro(cls)[1] + for defined_method in ( + method + for method in dir(cls) + if not method.startswith("__") and callable(getattr(cls, method)) + ): + cls_method = getattr(parent_class, defined_method) + subclass_method = getattr(cls, defined_method) + cls_method_params = inspect.signature(cls_method).parameters + subclass_method_params = inspect.signature(subclass_method).parameters + if cls_method_params.keys() != subclass_method_params.keys(): + raise NotImplementedError(f"""Signature for {defined_method} not correct: + Expected: {list(cls_method_params.keys())} + Got: {list(subclass_method_params.keys())} + """) + super().__init_subclass__(**kwargs) From dbf8df1fbf67a3c2ba9b1431ec8280891b200713 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 13 Aug 2024 02:17:37 +0530 Subject: [PATCH 02/27] Implements signatures for functional and decoupled GeolocationQueryService --- xcov19/app/services.py | 41 +++++++++-------- xcov19/tests/conftest.py | 38 +++++++++------ xcov19/tests/test_services.py | 87 ++++++++++++++++++++++++++++------- 3 files changed, 116 insertions(+), 50 deletions(-) diff --git a/xcov19/app/services.py b/xcov19/app/services.py index 75cfecf..2c2e202 100644 --- a/xcov19/app/services.py +++ b/xcov19/app/services.py @@ -10,11 +10,11 @@ import abc from collections.abc import Callable -from typing import Protocol, Tuple +from typing import Protocol, Tuple, List from rodi import Container -from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation +from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult from xcov19.app.settings import Settings from xcov19.utils.mixins import InterfaceProtocolCheckMixin @@ -23,14 +23,14 @@ def configure_services(settings: Settings) -> Tuple[Container, Settings]: container = Container() container.add_instance(settings) - container.add_scoped(LocationQueryServiceInterface, GeolocationQueryService) + 
container.add_singleton(LocationQueryServiceInterface, GeolocationQueryService) return container, settings class LocationQueryServiceInterface[T: LocationQueryJSON](Protocol): """Location aware service for listing faciltiies. - 1. Searches and fetches existing for query_id for a cust_id + 1. Searches and fetches existing processed results by query_id for a cust_id 2. Resolves coordinates from a given geolocation. 3. Fetches all facilities from a given set of records for a given radius from geolocation. @@ -48,7 +48,12 @@ async def resolve_coordinates( @classmethod @abc.abstractmethod - async def fetch_facilities(cls, query: T) -> FacilitiesResult: + async def fetch_facilities( + cls, + reverse_geo_lookup_svc: Callable[[T], dict], + patient_query_lookup_svc: Callable[[Address, T], List[FacilitiesResult]], + query: T, + ) -> List[FacilitiesResult] | None: raise NotImplementedError @@ -62,20 +67,18 @@ async def resolve_coordinates( reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], query: LocationQueryJSON, ) -> Address: + """Resolves to address by geo reverse lookup.""" return Address(**reverse_geo_lookup_svc(query)) @classmethod - async def fetch_facilities(cls, query: LocationQueryJSON) -> FacilitiesResult: - return FacilitiesResult( - name="Test facility", - address=Address(), - geolocation=GeoLocation(lat=0.0, lng=0.0), - contact="+919999999999", - facility_type="nursing", - ownership="charity", - specialties=["surgery", "pediatrics"], - stars=4, - reviews=120, - rank=2, - estimated_time=20, - ) + async def fetch_facilities( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + patient_query_lookup_svc: Callable[ + [Address, LocationQueryJSON], List[FacilitiesResult] + ], + query: LocationQueryJSON, + ) -> List[FacilitiesResult] | None: + """Fetches facilities for a query location for a query id for a customer.""" + patient_address = await cls.resolve_coordinates(reverse_geo_lookup_svc, query) + return 
patient_query_lookup_svc(patient_address, query) or None diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index e4978cf..812f19e 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -1,4 +1,5 @@ from collections.abc import Callable +from typing import List import pytest @@ -58,17 +59,26 @@ async def resolve_coordinates( return Address(**reverse_geo_lookup_svc(query)) @classmethod - async def fetch_facilities(cls, query: LocationQueryJSON) -> FacilitiesResult: - return FacilitiesResult( - name="Test facility", - address=Address(), - geolocation=GeoLocation(lat=0.0, lng=0.0), - contact="+919999999999", - facility_type="nursing", - ownership="charity", - specialties=["surgery", "pediatrics"], - stars=4, - reviews=120, - rank=2, - estimated_time=20, - ) + async def fetch_facilities( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + patient_query_lookup_svc: Callable[ + [Address, LocationQueryJSON], List[FacilitiesResult] + ], + query: LocationQueryJSON, + ) -> List[FacilitiesResult] | None: + return [ + FacilitiesResult( + name="Test facility", + address=Address(), + geolocation=GeoLocation(lat=0.0, lng=0.0), + contact="+919999999999", + facility_type="nursing", + ownership="charity", + specialties=["surgery", "pediatrics"], + stars=4, + reviews=120, + rank=2, + estimated_time=20, + ) + ] diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index 839f89a..a01ab13 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,12 +1,40 @@ import pytest import unittest -from xcov19.app.services import LocationQueryServiceInterface +from xcov19.app.services import LocationQueryServiceInterface, GeolocationQueryService from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation +def dummy_reverse_geo_lookup_svc(query: LocationQueryJSON) -> dict: + return {} + + +def dummy_patient_query_lookup_svc_none( + address: Address, query: LocationQueryJSON +) 
-> list: + return [] + + +def dummy_patient_query_lookup_svc(address: Address, query: LocationQueryJSON) -> list: + return [ + FacilitiesResult( + name="Test facility", + address=Address(), + geolocation=GeoLocation(lat=0.0, lng=0.0), + contact="+919999999999", + facility_type="nursing", + ownership="charity", + specialties=["surgery", "pediatrics"], + stars=4, + reviews=120, + rank=2, + estimated_time=20, + ) + ] + + @pytest.mark.usefixtures("dummy_geolocation", "stub_location_srvc") -class GeoLocationServiceTest(unittest.IsolatedAsyncioTestCase): +class GeoLocationServiceInterfaceTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) def autouse( self, @@ -18,23 +46,48 @@ def autouse( async def test_resolve_coordinates(self): result = await self.stub_location_srvc.resolve_coordinates( - self.dummy_geolocation + dummy_reverse_geo_lookup_svc, self.dummy_geolocation ) self.assertEqual(Address(), result) async def test_fetch_facilities(self): - result = await self.stub_location_srvc.fetch_facilities(self.dummy_geolocation) - expected = FacilitiesResult( - name="Test facility", - address=Address(), - geolocation=GeoLocation(lat=0.0, lng=0.0), - contact="+919999999999", - facility_type="nursing", - ownership="charity", - specialties=["surgery", "pediatrics"], - stars=4, - reviews=120, - rank=2, - estimated_time=20, + result = await self.stub_location_srvc.fetch_facilities( + dummy_reverse_geo_lookup_svc, + dummy_patient_query_lookup_svc, + self.dummy_geolocation, + ) + self.assertIsInstance(result, list) + + +@pytest.mark.usefixtures("dummy_geolocation", "stub_location_srvc") +class GeoLocationServiceTest(unittest.IsolatedAsyncioTestCase): + @pytest.fixture(autouse=True) + def autouse(self, dummy_geolocation: LocationQueryJSON): + self.dummy_geolocation = dummy_geolocation + + async def test_resolve_coordinates(self): + result = await GeolocationQueryService.resolve_coordinates( + dummy_reverse_geo_lookup_svc, self.dummy_geolocation + ) + expected = 
Address() + self.assertEqual(expected, result, f"Got {result}, expected {expected}") + + async def test_fetch_facilities(self): + result = await GeolocationQueryService.fetch_facilities( + dummy_reverse_geo_lookup_svc, + dummy_patient_query_lookup_svc, + self.dummy_geolocation, + ) + self.assertIsNotNone(result) + record = None + if result: + record = result[0] + self.assertIsInstance(record, FacilitiesResult) + + async def test_fetch_facilities_no_results(self): + result = await GeolocationQueryService.fetch_facilities( + dummy_reverse_geo_lookup_svc, + dummy_patient_query_lookup_svc_none, + self.dummy_geolocation, ) - self.assertEqual(expected, result) + self.assertIsNone(result) From bfa02f99e427a5686f70c45db1008600d0fdfb03 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 13 Aug 2024 02:19:12 +0530 Subject: [PATCH 03/27] adds make test --- Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Makefile b/Makefile index 8ca99fc..b077b07 100644 --- a/Makefile +++ b/Makefile @@ -9,3 +9,6 @@ install: pip-install: pip install --prefer-binary --use-pep517 --check-build-dependencies .[dev] + +test: + pytest -s xcov19/tests/ From a55413c7a3eca6bedcf40c6372febc6f98102328 Mon Sep 17 00:00:00 2001 From: codecakes Date: Thu, 22 Aug 2024 04:07:46 +0530 Subject: [PATCH 04/27] ensure signature type checks for interface implementations --- xcov19/app/services.py | 10 +++++- xcov19/tests/test_services.py | 5 +++ xcov19/utils/mixins.py | 60 +++++++++++++++++++++++++++++------ 3 files changed, 65 insertions(+), 10 deletions(-) diff --git a/xcov19/app/services.py b/xcov19/app/services.py index 2c2e202..3939018 100644 --- a/xcov19/app/services.py +++ b/xcov19/app/services.py @@ -8,9 +8,11 @@ https://github.com/Neoteroi/rodi/tree/main/examples """ +from __future__ import annotations + import abc from collections.abc import Callable -from typing import Protocol, Tuple, List +from typing import Protocol, Tuple, List, TypeVar from rodi import Container @@ -18,6 +20,8 @@ from 
xcov19.app.settings import Settings from xcov19.utils.mixins import InterfaceProtocolCheckMixin +T = TypeVar("T", bound=LocationQueryJSON) + def configure_services(settings: Settings) -> Tuple[Container, Settings]: container = Container() @@ -82,3 +86,7 @@ async def fetch_facilities( """Fetches facilities for a query location for a query id for a customer.""" patient_address = await cls.resolve_coordinates(reverse_geo_lookup_svc, query) return patient_query_lookup_svc(patient_address, query) or None + + +# TODO: Implement reverse_geo_lookup_svc +# TODO: Implement following method in patient_query_lookup_svc: diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index a01ab13..f219e18 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -91,3 +91,8 @@ async def test_fetch_facilities_no_results(self): self.dummy_geolocation, ) self.assertIsNone(result) + + +class PatientQueryLookupSvcTest(unittest.TestCase): + @pytest.mark.skip("In a seperate class.") + def test_patient_query_lookup_svc(self): ... 
diff --git a/xcov19/utils/mixins.py b/xcov19/utils/mixins.py index 1284031..fb90e90 100644 --- a/xcov19/utils/mixins.py +++ b/xcov19/utils/mixins.py @@ -1,4 +1,40 @@ import inspect +import operator +from typing import Tuple, Any, TypeVar, get_type_hints + + +ClassNameAttrGetter = operator.attrgetter("__name__") +BoundAttrGetter = operator.attrgetter("__bound__") + + +def match_signature( + cls_signature: Tuple[str, Any], subclass_signature: Tuple[str, Any] +): + """Match inspect signature by their names and type annotation.""" + param_name, param_type = cls_signature + subcls_param_name, subcls_param_type = subclass_signature + if param_name != subcls_param_name: + raise NotImplementedError( + f"""Method name mismatch: + Expected: {param_name} + Got: {subcls_param_name} + """ + ) + + if ClassNameAttrGetter(param_type) != ClassNameAttrGetter(subcls_param_type): + if ( + isinstance(param_type, TypeVar) + and BoundAttrGetter(param_type) == subcls_param_type + ): + return True + raise NotImplementedError( + f""" + Signature mismatch for parameter {param_name}: + Expected: {param_type} + Got: {subcls_param_type} + """ + ) + return True class InterfaceProtocolCheckMixin: @@ -10,18 +46,24 @@ class InterfaceProtocolCheckMixin: def __init_subclass__(cls, **kwargs): parent_class = inspect.getmro(cls)[1] + # raise Exception(inspect.getmembers(cls, predicate=inspect.isfunction)) for defined_method in ( - method - for method in dir(cls) - if not method.startswith("__") and callable(getattr(cls, method)) + method_name + for method_name, _ in inspect.getmembers(cls, predicate=inspect.ismethod) + if not method_name.startswith("__") ): + # TODO: Raise if either classes don't have the method declared. 
cls_method = getattr(parent_class, defined_method) subclass_method = getattr(cls, defined_method) - cls_method_params = inspect.signature(cls_method).parameters - subclass_method_params = inspect.signature(subclass_method).parameters - if cls_method_params.keys() != subclass_method_params.keys(): - raise NotImplementedError(f"""Signature for {defined_method} not correct: - Expected: {list(cls_method_params.keys())} - Got: {list(subclass_method_params.keys())} + cls_method_params: dict = get_type_hints(cls_method) + subclass_method_params: dict = get_type_hints(subclass_method) + if len(cls_method_params) != len(subclass_method_params): + raise NotImplementedError(f"""Method parameters mismatch: + Expected: {cls_method_params.keys()} + Got: {subclass_method_params.keys()} """) + for cls_signature, subclass_signature in zip( + cls_method_params.items(), subclass_method_params.items() + ): + match_signature(cls_signature, subclass_signature) super().__init_subclass__(**kwargs) From e288445315442be2957f538c94cf8eab7471e52e Mon Sep 17 00:00:00 2001 From: codecakes Date: Thu, 22 Aug 2024 04:28:53 +0530 Subject: [PATCH 05/27] refactoring project structure to match a more decoupled logic away from blacksheep framework. most of these layers do not have to be tied to a framework's folder. 
any other framework can simply consume these layers as well --- xcov19/app/controllers/diagnose.py | 3 +- xcov19/app/controllers/geolocation.py | 5 +- xcov19/app/middleware.py | 2 +- xcov19/app/services.py | 72 ++------------------------- xcov19/app/settings.py | 6 +++ xcov19/{app => }/dto.py | 6 --- xcov19/services/__init__.py | 0 xcov19/services/geolocation.py | 68 +++++++++++++++++++++++++ xcov19/tests/conftest.py | 4 +- xcov19/tests/test_services.py | 7 ++- 10 files changed, 92 insertions(+), 81 deletions(-) rename xcov19/{app => }/dto.py (89%) create mode 100644 xcov19/services/__init__.py create mode 100644 xcov19/services/geolocation.py diff --git a/xcov19/app/controllers/diagnose.py b/xcov19/app/controllers/diagnose.py index 1c24fe9..e2104af 100644 --- a/xcov19/app/controllers/diagnose.py +++ b/xcov19/app/controllers/diagnose.py @@ -4,7 +4,8 @@ from blacksheep.server.controllers import APIController from xcov19.app.controllers import post -from xcov19.app.dto import DiagnosisQueryJSON, FromOriginMatchHeader +from xcov19.dto import DiagnosisQueryJSON +from xcov19.app.settings import FromOriginMatchHeader class DiagnosisController(APIController): diff --git a/xcov19/app/controllers/geolocation.py b/xcov19/app/controllers/geolocation.py index b364964..b6e904f 100644 --- a/xcov19/app/controllers/geolocation.py +++ b/xcov19/app/controllers/geolocation.py @@ -4,9 +4,10 @@ from blacksheep.server.controllers import APIController from xcov19.app.controllers import post -from xcov19.app.dto import FromOriginMatchHeader, LocationQueryJSON +from xcov19.dto import LocationQueryJSON +from xcov19.app.settings import FromOriginMatchHeader -from xcov19.app.services import LocationQueryServiceInterface +from xcov19.services.geolocation import LocationQueryServiceInterface class GeolocationController(APIController): diff --git a/xcov19/app/middleware.py b/xcov19/app/middleware.py index 040d362..a3b86a7 100644 --- a/xcov19/app/middleware.py +++ b/xcov19/app/middleware.py @@ 
-2,7 +2,7 @@ from blacksheep import Application, Request, Response, bad_request -from xcov19.app.dto import FromOriginMatchHeader +from xcov19.app.settings import FromOriginMatchHeader def configure_middleware(app: Application, *middlewares): diff --git a/xcov19/app/services.py b/xcov19/app/services.py index 3939018..81beaa8 100644 --- a/xcov19/app/services.py +++ b/xcov19/app/services.py @@ -10,17 +10,15 @@ from __future__ import annotations -import abc -from collections.abc import Callable -from typing import Protocol, Tuple, List, TypeVar +from typing import Tuple from rodi import Container -from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult from xcov19.app.settings import Settings -from xcov19.utils.mixins import InterfaceProtocolCheckMixin - -T = TypeVar("T", bound=LocationQueryJSON) +from xcov19.services.geolocation import ( + LocationQueryServiceInterface, + GeolocationQueryService, +) def configure_services(settings: Settings) -> Tuple[Container, Settings]: @@ -30,63 +28,3 @@ def configure_services(settings: Settings) -> Tuple[Container, Settings]: container.add_singleton(LocationQueryServiceInterface, GeolocationQueryService) return container, settings - - -class LocationQueryServiceInterface[T: LocationQueryJSON](Protocol): - """Location aware service for listing faciltiies. - 1. Searches and fetches existing processed results by query_id for a cust_id - 2. Resolves coordinates from a given geolocation. - 3. Fetches all facilities from a given set of records for a - given radius from geolocation. - - Radius is default for now. 
- # TODO: Filter to be added - """ - - @classmethod - @abc.abstractmethod - async def resolve_coordinates( - cls, reverse_geo_lookup_svc: Callable[[T], dict], query: T - ) -> Address: - raise NotImplementedError - - @classmethod - @abc.abstractmethod - async def fetch_facilities( - cls, - reverse_geo_lookup_svc: Callable[[T], dict], - patient_query_lookup_svc: Callable[[Address, T], List[FacilitiesResult]], - query: T, - ) -> List[FacilitiesResult] | None: - raise NotImplementedError - - -# TODO: make hard-coded response functional -class GeolocationQueryService( - LocationQueryServiceInterface, InterfaceProtocolCheckMixin -): - @classmethod - async def resolve_coordinates( - cls, - reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], - query: LocationQueryJSON, - ) -> Address: - """Resolves to address by geo reverse lookup.""" - return Address(**reverse_geo_lookup_svc(query)) - - @classmethod - async def fetch_facilities( - cls, - reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], - patient_query_lookup_svc: Callable[ - [Address, LocationQueryJSON], List[FacilitiesResult] - ], - query: LocationQueryJSON, - ) -> List[FacilitiesResult] | None: - """Fetches facilities for a query location for a query id for a customer.""" - patient_address = await cls.resolve_coordinates(reverse_geo_lookup_svc, query) - return patient_query_lookup_svc(patient_address, query) or None - - -# TODO: Implement reverse_geo_lookup_svc -# TODO: Implement following method in patient_query_lookup_svc: diff --git a/xcov19/app/settings.py b/xcov19/app/settings.py index 9425f80..d48ce14 100644 --- a/xcov19/app/settings.py +++ b/xcov19/app/settings.py @@ -7,6 +7,7 @@ https://docs.pydantic.dev/latest/usage/settings/ """ +from blacksheep import FromHeader from pydantic import BaseModel from pydantic_settings import BaseSettings, SettingsConfigDict @@ -38,3 +39,8 @@ class Settings(BaseSettings): def load_settings() -> Settings: return Settings() + + +class 
FromOriginMatchHeader(FromHeader[str]): + name = "X-Origin-Match-Header" + secret = "secret" diff --git a/xcov19/app/dto.py b/xcov19/dto.py similarity index 89% rename from xcov19/app/dto.py rename to xcov19/dto.py index b71c312..67aae64 100644 --- a/xcov19/app/dto.py +++ b/xcov19/dto.py @@ -1,14 +1,8 @@ -from blacksheep import FromHeader from pydantic import BaseModel, Field from typing import Annotated, List -class FromOriginMatchHeader(FromHeader[str]): - name = "X-Origin-Match-Header" - secret = "secret" - - class GeoLocation(BaseModel): lat: float lng: float diff --git a/xcov19/services/__init__.py b/xcov19/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/xcov19/services/geolocation.py b/xcov19/services/geolocation.py new file mode 100644 index 0000000..79afdd0 --- /dev/null +++ b/xcov19/services/geolocation.py @@ -0,0 +1,68 @@ +from __future__ import annotations + +import abc +from typing import TypeVar, Protocol, Callable, List + +from xcov19.dto import LocationQueryJSON, Address, FacilitiesResult +from xcov19.utils.mixins import InterfaceProtocolCheckMixin + +T = TypeVar("T", bound=LocationQueryJSON) + + +class LocationQueryServiceInterface[T: LocationQueryJSON](Protocol): + """Location aware service for listing faciltiies. + 1. Searches and fetches existing processed results by query_id for a cust_id + 2. Resolves coordinates from a given geolocation. + 3. Fetches all facilities from a given set of records for a + given radius from geolocation. + + Radius is default for now. 
+ # TODO: Filter to be added + """ + + @classmethod + @abc.abstractmethod + async def resolve_coordinates( + cls, reverse_geo_lookup_svc: Callable[[T], dict], query: T + ) -> Address: + raise NotImplementedError + + @classmethod + @abc.abstractmethod + async def fetch_facilities( + cls, + reverse_geo_lookup_svc: Callable[[T], dict], + patient_query_lookup_svc: Callable[[Address, T], List[FacilitiesResult]], + query: T, + ) -> List[FacilitiesResult] | None: + raise NotImplementedError + + +class GeolocationQueryService( + LocationQueryServiceInterface, InterfaceProtocolCheckMixin +): + @classmethod + async def resolve_coordinates( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + query: LocationQueryJSON, + ) -> Address: + """Resolves to address by geo reverse lookup.""" + return Address(**reverse_geo_lookup_svc(query)) + + @classmethod + async def fetch_facilities( + cls, + reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + patient_query_lookup_svc: Callable[ + [Address, LocationQueryJSON], List[FacilitiesResult] + ], + query: LocationQueryJSON, + ) -> List[FacilitiesResult] | None: + """Fetches facilities for a query location for a query id for a customer.""" + patient_address = await cls.resolve_coordinates(reverse_geo_lookup_svc, query) + return patient_query_lookup_svc(patient_address, query) or None + + +# TODO: Implement reverse_geo_lookup_svc +# TODO: Implement following method in patient_query_lookup_svc: diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index 812f19e..16d8682 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -3,7 +3,7 @@ import pytest -from xcov19.app.dto import ( +from xcov19.dto import ( AnonymousId, GeoLocation, LocationQueryJSON, @@ -11,7 +11,7 @@ Address, FacilitiesResult, ) -from xcov19.app.services import LocationQueryServiceInterface +from xcov19.services.geolocation import LocationQueryServiceInterface from xcov19.utils.mixins import InterfaceProtocolCheckMixin 
# Same as using @pytest.mark.anyio diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index f219e18..7b0139b 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,8 +1,11 @@ import pytest import unittest -from xcov19.app.services import LocationQueryServiceInterface, GeolocationQueryService -from xcov19.app.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation +from xcov19.services.geolocation import ( + LocationQueryServiceInterface, + GeolocationQueryService, +) +from xcov19.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation def dummy_reverse_geo_lookup_svc(query: LocationQueryJSON) -> dict: From d1c733f231eb2dcee61efb378fe68326272fac1f Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 26 Aug 2024 01:40:50 +0530 Subject: [PATCH 06/27] added todos command to check all TODOs in project. updated repository interface --- Makefile | 3 +++ xcov19/domain/repository.py | 24 -------------------- xcov19/domain/repository_interface.py | 32 +++++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 24 deletions(-) delete mode 100644 xcov19/domain/repository.py create mode 100644 xcov19/domain/repository_interface.py diff --git a/Makefile b/Makefile index b077b07..2c1fd79 100644 --- a/Makefile +++ b/Makefile @@ -12,3 +12,6 @@ pip-install: test: pytest -s xcov19/tests/ + +todos: + @grep -rn "TODO:" xcov19/ --exclude-dir=node_modules --include="*.py" \ No newline at end of file diff --git a/xcov19/domain/repository.py b/xcov19/domain/repository.py deleted file mode 100644 index fb39f06..0000000 --- a/xcov19/domain/repository.py +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Protocol -import abc - -from xcov19.domain.models.patient import Patient -from xcov19.domain.models.provider import Provider - - -class IPatientStore[T: Patient](Protocol): - @classmethod - @abc.abstractmethod - def enqueue_diagnosis_query(cls, patient: T): - raise NotImplementedError - - @classmethod - 
@abc.abstractmethod - def enqueue_geolocation_query(cls, patient: T): - raise NotImplementedError - - -class IProviderStore[T: Provider](Protocol): - @classmethod - @abc.abstractmethod - def fetch(cls, provider: T): - raise NotImplementedError diff --git a/xcov19/domain/repository_interface.py b/xcov19/domain/repository_interface.py new file mode 100644 index 0000000..eb7ea85 --- /dev/null +++ b/xcov19/domain/repository_interface.py @@ -0,0 +1,32 @@ +from typing import Protocol, TypeVar, List +import abc + +from xcov19.domain.models.patient import Patient +from xcov19.domain.models.provider import Provider + +PatientT = TypeVar("PatientT", bound=Patient) +ProviderT = TypeVar("ProviderT", bound=Patient) + + +class IPatientStore[PatientT: Patient](Protocol): + @classmethod + @abc.abstractmethod + def enqueue_diagnosis_query(cls, patient: PatientT): + raise NotImplementedError + + @classmethod + @abc.abstractmethod + def enqueue_geolocation_query(cls, patient: PatientT): + raise NotImplementedError + + +class IProviderRepository[ProviderT: Provider](Protocol): + @abc.abstractmethod + def fetch_by_providers(self, **address: dict[str, str]) -> List[ProviderT]: + raise NotImplementedError + + @abc.abstractmethod + def fetch_by_query( + self, query_id: str, filtered_providers: List[ProviderT] + ) -> List[ProviderT]: + raise NotImplementedError From ad6805ef4ed1e7a4968df00bd03c76a8cbc28bd9 Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 26 Aug 2024 02:02:46 +0530 Subject: [PATCH 07/27] refactored code to adhere to interface requirements and implement missing functionality to make tests green --- xcov19/services/geolocation.py | 16 ++-- xcov19/tests/conftest.py | 4 +- xcov19/tests/test_services.py | 131 +++++++++++++++++++++++++++++---- 3 files changed, 126 insertions(+), 25 deletions(-) diff --git a/xcov19/services/geolocation.py b/xcov19/services/geolocation.py index 79afdd0..ca46d4a 100644 --- a/xcov19/services/geolocation.py +++ b/xcov19/services/geolocation.py @@ 
-9,6 +9,9 @@ T = TypeVar("T", bound=LocationQueryJSON) +# Application services + + class LocationQueryServiceInterface[T: LocationQueryJSON](Protocol): """Location aware service for listing faciltiies. 1. Searches and fetches existing processed results by query_id for a cust_id @@ -32,14 +35,14 @@ async def resolve_coordinates( async def fetch_facilities( cls, reverse_geo_lookup_svc: Callable[[T], dict], - patient_query_lookup_svc: Callable[[Address, T], List[FacilitiesResult]], query: T, + patient_query_lookup_svc: Callable[[Address, T], List[FacilitiesResult]], ) -> List[FacilitiesResult] | None: raise NotImplementedError class GeolocationQueryService( - LocationQueryServiceInterface, InterfaceProtocolCheckMixin + LocationQueryServiceInterface[LocationQueryJSON], InterfaceProtocolCheckMixin ): @classmethod async def resolve_coordinates( @@ -54,15 +57,12 @@ async def resolve_coordinates( async def fetch_facilities( cls, reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + query: LocationQueryJSON, patient_query_lookup_svc: Callable[ - [Address, LocationQueryJSON], List[FacilitiesResult] + [Address, LocationQueryJSON], + List[FacilitiesResult], ], - query: LocationQueryJSON, ) -> List[FacilitiesResult] | None: """Fetches facilities for a query location for a query id for a customer.""" patient_address = await cls.resolve_coordinates(reverse_geo_lookup_svc, query) return patient_query_lookup_svc(patient_address, query) or None - - -# TODO: Implement reverse_geo_lookup_svc -# TODO: Implement following method in patient_query_lookup_svc: diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index 16d8682..c7051d9 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -34,7 +34,7 @@ def dummy_coordinates(): @pytest.fixture(scope="class") -def dummy_geolocation(dummy_coordinates): +def dummy_geolocation_query_json(dummy_coordinates): return LocationQueryJSON( location=dummy_coordinates, cust_id=AnonymousId(cust_id="test_cust_id"), 
@@ -62,10 +62,10 @@ async def resolve_coordinates( async def fetch_facilities( cls, reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + query: LocationQueryJSON, patient_query_lookup_svc: Callable[ [Address, LocationQueryJSON], List[FacilitiesResult] ], - query: LocationQueryJSON, ) -> List[FacilitiesResult] | None: return [ FacilitiesResult( diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index 7b0139b..3dafb04 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,5 +1,13 @@ +from typing import List import pytest import unittest +from xcov19.domain.models.provider import ( + Contact, + FacilityEstablishment, + FacilityOwnership, + Provider, +) +from xcov19.domain.repository_interface import IProviderRepository from xcov19.services.geolocation import ( LocationQueryServiceInterface, @@ -8,6 +16,13 @@ from xcov19.dto import Address, LocationQueryJSON, FacilitiesResult, GeoLocation +from xcov19.utils.mixins import InterfaceProtocolCheckMixin + +import random + +RANDOM_SEED = random.seed(1) + + def dummy_reverse_geo_lookup_svc(query: LocationQueryJSON) -> dict: return {} @@ -30,47 +45,118 @@ def dummy_patient_query_lookup_svc(address: Address, query: LocationQueryJSON) - specialties=["surgery", "pediatrics"], stars=4, reviews=120, - rank=2, + rank=random.randint(1, 20), estimated_time=20, ) ] -@pytest.mark.usefixtures("dummy_geolocation", "stub_location_srvc") +class DummyProviderRepo(IProviderRepository[Provider], InterfaceProtocolCheckMixin): + def fetch_by_providers(self, **address: dict[str, str]) -> List[Provider]: + return [ + Provider( + name="Dummy Hospital", + address="123 Test Street", + geo_location=(0.0, 0.0), + contact=Contact("+1234567890"), + facility_type=FacilityEstablishment.HOSPITAL, + ownership=FacilityOwnership.PRIVATE, + specialties=["General", "Surgery"], + stars=4, + reviews=100, + ) + ] + + def fetch_by_query( + self, query_id: str, filtered_providers: List[Provider] + ) -> 
List[Provider]: + return [ + Provider( + name="Dummy Hospital", + address="123 Test Street", + geo_location=(0.0, 0.0), + contact=Contact("+1234567890"), + facility_type=FacilityEstablishment.HOSPITAL, + ownership=FacilityOwnership.PRIVATE, + specialties=["General", "Surgery"], + stars=4, + reviews=100, + ) + ] + + +def stub_get_facilities_by_patient_query( + patient_address: Address, + query: LocationQueryJSON, + repo: IProviderRepository = DummyProviderRepo(), +) -> List[FacilitiesResult]: + facilities_result = [] + providers = repo.fetch_by_providers(**patient_address.model_dump(round_trip=True)) + filtered_providers = repo.fetch_by_query(query.query_id.query_id, providers) + for provider in filtered_providers: + address_name, address1, address2 = provider.address.split() + address = Address(name=address_name, street=f"{address1} {address2}") + geolocation = GeoLocation( + lat=provider.geo_location[0], lng=provider.geo_location[1] + ) + + facilities_result += [ + FacilitiesResult( + name=provider.name, + address=address, + geolocation=geolocation, + contact=provider.contact.value, + facility_type=provider.facility_type.value, + ownership=provider.ownership.value, + specialties=provider.specialties, + stars=provider.stars, + reviews=provider.reviews, + rank=random.randint(1, 20), + estimated_time=20, + ) + ] + return facilities_result + + +@pytest.mark.usefixtures("dummy_geolocation_query_json", "stub_location_srvc") class GeoLocationServiceInterfaceTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) def autouse( self, - dummy_geolocation: LocationQueryJSON, + dummy_geolocation_query_json: LocationQueryJSON, stub_location_srvc: LocationQueryServiceInterface, ): - self.dummy_geolocation = dummy_geolocation + self.dummy_geolocation_query_json = dummy_geolocation_query_json self.stub_location_srvc = stub_location_srvc async def test_resolve_coordinates(self): result = await self.stub_location_srvc.resolve_coordinates( - 
dummy_reverse_geo_lookup_svc, self.dummy_geolocation + dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json ) self.assertEqual(Address(), result) async def test_fetch_facilities(self): result = await self.stub_location_srvc.fetch_facilities( dummy_reverse_geo_lookup_svc, + self.dummy_geolocation_query_json, dummy_patient_query_lookup_svc, - self.dummy_geolocation, ) self.assertIsInstance(result, list) + assert result + self.assertTrue( + all(isinstance(provider, FacilitiesResult) for provider in result) + ) -@pytest.mark.usefixtures("dummy_geolocation", "stub_location_srvc") +@pytest.mark.usefixtures("dummy_geolocation_query_json") class GeoLocationServiceTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) - def autouse(self, dummy_geolocation: LocationQueryJSON): - self.dummy_geolocation = dummy_geolocation + def autouse(self, dummy_geolocation_query_json: LocationQueryJSON): + self.dummy_geolocation_query_json = dummy_geolocation_query_json async def test_resolve_coordinates(self): result = await GeolocationQueryService.resolve_coordinates( - dummy_reverse_geo_lookup_svc, self.dummy_geolocation + dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json ) expected = Address() self.assertEqual(expected, result, f"Got {result}, expected {expected}") @@ -78,8 +164,8 @@ async def test_resolve_coordinates(self): async def test_fetch_facilities(self): result = await GeolocationQueryService.fetch_facilities( dummy_reverse_geo_lookup_svc, + self.dummy_geolocation_query_json, dummy_patient_query_lookup_svc, - self.dummy_geolocation, ) self.assertIsNotNone(result) record = None @@ -90,12 +176,27 @@ async def test_fetch_facilities(self): async def test_fetch_facilities_no_results(self): result = await GeolocationQueryService.fetch_facilities( dummy_reverse_geo_lookup_svc, + self.dummy_geolocation_query_json, dummy_patient_query_lookup_svc_none, - self.dummy_geolocation, ) self.assertIsNone(result) -class 
PatientQueryLookupSvcTest(unittest.TestCase): - @pytest.mark.skip("In a seperate class.") - def test_patient_query_lookup_svc(self): ... +# @pytest.mark.skip("WIP") +@pytest.mark.usefixtures("dummy_geolocation_query_json") +class PatientQueryLookupSvcTest(unittest.IsolatedAsyncioTestCase): + @pytest.fixture(autouse=True) + def autouse(self, dummy_geolocation_query_json: LocationQueryJSON): + self.dummy_geolocation_query_json = dummy_geolocation_query_json + + async def test_patient_query_lookup_svc(self): + providers = await GeolocationQueryService.fetch_facilities( + dummy_reverse_geo_lookup_svc, + self.dummy_geolocation_query_json, + stub_get_facilities_by_patient_query, + ) + self.assertIsInstance(providers, list) + assert providers + self.assertTrue( + all(isinstance(provider, FacilitiesResult) for provider in providers) + ) From e99d7416ecc2d8c1452f8c23aecdd4dc0d4ac85f Mon Sep 17 00:00:00 2001 From: codecakes Date: Fri, 30 Aug 2024 15:37:24 +0530 Subject: [PATCH 08/27] update libraries for adding sqlite orm support --- poetry.lock | 298 ++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 4 + xcov19/app/main.py | 1 + 3 files changed, 297 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2ce8b75..40065ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,23 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "alembic" +version = "1.13.2" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] [[package]] name = "annotated-types" @@ -331,6 +350,81 @@ pyyaml = ">=5.3.1" requests = ">=2.23.0" rich = "*" +[[package]] +name = "cython" +version = "3.0.11" +description = "The Cython compiler for writing C extensions in the Python language." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, + {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, + {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = 
"sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, + {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, + {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = "sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, + {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, + {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, + {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, + {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, + {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, + {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = "sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, + {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, + {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, + {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, + {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, + {file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, 
+ {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, + {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, + {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, + {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, + {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, + {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, + {file = "Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, + {file = 
"Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, + {file = "Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, + {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, + {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, + {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, +] + [[package]] name = "deepmerge" version = "1.1.1" @@ -420,6 +514,77 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = 
"greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + [[package]] name = "guardpost" version = "1.0.2" @@ -620,6 +785,25 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "mako" +version = "1.3.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -892,7 +1076,7 @@ files = [ name = "prompt-toolkit" version = "3.0.36" description = "Library for building powerful interactive command lines in Python" -optional = false +optional = true python-versions = ">=3.6.2" files = [ {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, @@ -1226,7 +1410,7 @@ files = [ name = "questionary" version = "2.0.1" description = "Python library to build pretty command line user prompts ⭐️" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "questionary-2.0.1-py3-none-any.whl", hash = "sha256:8ab9a01d0b91b68444dff7f6652c1e754105533f083cbe27597c8110ecc230a2"}, @@ -1355,6 +1539,108 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.32" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, + {file = 
"SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, + {file = 
"SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win32.whl", hash = "sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da"}, + {file = "SQLAlchemy-2.0.32-cp37-cp37m-win_amd64.whl", hash = "sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win32.whl", hash = "sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65"}, + {file = "SQLAlchemy-2.0.32-cp38-cp38-win_amd64.whl", hash = "sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win32.whl", hash = "sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78"}, + {file = "SQLAlchemy-2.0.32-cp39-cp39-win_amd64.whl", hash = "sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84"}, + {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, + {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] 
+mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlmodel" +version = "0.0.21" +description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sqlmodel-0.0.21-py3-none-any.whl", hash = "sha256:eca104afe8a643f0764076b29f02e51d19d6b35c458f4c119942960362a4b52a"}, + {file = "sqlmodel-0.0.21.tar.gz", hash = "sha256:b2034c23d930f66d2091b17a4280a9c23a7ea540a71e7fcf9c746d262f06f74a"}, +] + +[package.dependencies] +pydantic = ">=1.10.13,<3.0.0" +SQLAlchemy = ">=2.0.14,<2.1.0" + [[package]] name = "text-unidecode" version = "1.3" @@ -1370,7 +1656,7 @@ files = [ name = "types-python-dateutil" version = "2.9.0.20240821" description = "Typing stubs for python-dateutil" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, @@ -1473,7 +1759,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -optional = false +optional = true python-versions = "*" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, @@ -1502,4 +1788,4 @@ test = ["anyio", "pytest", "pytest-asyncio"] [metadata] lock-version = "2.0" python-versions = 
"^3.12" -content-hash = "e9efdca594aaccfd01eeebd8dc01a3166cdeadf2b3b7ee78131de5255f60ac39" +content-hash = "e0efa2d76594a35409ee46a7221fe770f062a1fcefd5ff5bd4c3947b6efdcf3d" diff --git a/pyproject.toml b/pyproject.toml index c2e922b..49089ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,10 @@ Hypercorn = "^0.17.3" MarkupSafe = "^2.1.3" uvloop = {version = "^0.20.0", markers = "sys_platform != 'win32'"} pydantic-settings = {version = "^2.3.4", markers = "sys_platform != 'win32'"} +cython = "^3.0.11" +sqlalchemy = {version="^2.0.23", markers = "sys_platform != 'win32'", extras = ["asyncio"]} +alembic = "^1.13.2" +sqlmodel = {version="^0.0.21"} # [tool.poetry.group.dev.dependencies] diff --git a/xcov19/app/main.py b/xcov19/app/main.py index d9eaf8c..c1e2286 100644 --- a/xcov19/app/main.py +++ b/xcov19/app/main.py @@ -27,6 +27,7 @@ def configure_application( configure_authentication(app, settings) configure_middleware(app, origin_header_middleware) configure_docs(app, settings) + # TODO: Add configure_database(app, settings) return app From 4f24bd9dd46ff9955e6d0cd717462b4b156f4556 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 3 Sep 2024 01:52:13 +0530 Subject: [PATCH 09/27] added support libraries --- poetry.lock | 20 +++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index a0a98f0..4baf263 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,23 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+[[package]] +name = "aiosqlite" +version = "0.20.0" +description = "asyncio bridge to the standard sqlite3 module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, + {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, +] + +[package.dependencies] +typing_extensions = ">=4.0" + +[package.extras] +dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] +docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] + [[package]] name = "alembic" version = "1.13.2" @@ -1788,4 +1806,4 @@ test = ["anyio", "pytest", "pytest-asyncio"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "3816fb075ff35f5d1421e89542bd1b04c3533eb8bceb3b750eaea0996d89134e" +content-hash = "1507033c45d82ece31fe123e08723b0af1845e1a27c6c9b802b27443ca9b86fd" diff --git a/pyproject.toml b/pyproject.toml index 4141200..09c67c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ pydantic-settings = {version = "^2.3.4", markers = "sys_platform != 'win32'"} cython = "^3.0.11" sqlalchemy = {version="^2.0.23", markers = "sys_platform != 'win32'", extras = ["asyncio"]} alembic = "^1.13.2" +aiosqlite = "^0.20.0" sqlmodel = {version="^0.0.21"} # [tool.poetry.group.dev.dependencies] From ef851764b44301e2617b8cce63b00a16c5256eae Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 3 Sep 2024 02:07:12 +0530 Subject: [PATCH 10/27] implements configure_database_session(services, settings) and on_start --- xcov19/app/database.py | 90 ++++++++++++++++++++++++++++++++++++++++++ xcov19/app/main.py | 21 +++++++++- xcov19/app/services.py | 1 + xcov19/app/settings.py | 2 + xcov19/dev.py | 3 ++ 5 files changed, 115 insertions(+), 2 
deletions(-) create mode 100644 xcov19/app/database.py diff --git a/xcov19/app/database.py b/xcov19/app/database.py new file mode 100644 index 0000000..6e8c07f --- /dev/null +++ b/xcov19/app/database.py @@ -0,0 +1,90 @@ +from collections.abc import AsyncGenerator +import sys +from rodi import Container +from sqlmodel import SQLModel + +from xcov19.app.settings import Settings +from sqlalchemy.ext.asyncio import ( + create_async_engine, + AsyncEngine, + AsyncSession, + async_sessionmaker, +) + +import logging +from sqlalchemy.pool import AsyncAdaptedQueuePool + +db_logger = logging.getLogger(__name__) +db_fmt = logging.Formatter( + "DATABASE:%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) +stream_handler = logging.StreamHandler(sys.stdout) +stream_handler.setFormatter(db_fmt) + +db_logger.setLevel(logging.INFO) +db_logger.addHandler(stream_handler) + + +class SessionFactory: + """Class to remember sessionmaker factory constructor for DI container. + + Use like this to retrieve sessionmaker from DI container: + container.resolve(SessionFactory) + + It is already added as in `configure_database_session`: + container.add_singleton_by_factory(SessionFactory(engine), SessionFactory) + """ + + def __init__(self, engine: AsyncEngine): + self._engine = engine + + def __call__(self) -> async_sessionmaker[AsyncSession]: + return async_sessionmaker( + self._engine, class_=AsyncSession, expire_on_commit=False + ) + + +async def setup_database(engine: AsyncEngine) -> None: + """Sets up tables for database.""" + async with engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + + +async def create_async_session( + AsyncSessionFactory: async_sessionmaker[AsyncSession], +) -> AsyncGenerator[AsyncSession, None]: + """Create an asynchronous database session.""" + async with AsyncSessionFactory() as session: + try: + yield session + finally: + await session.close() + + +async def start_db_session(container: Container): + """Starts a new database session 
given SessionFactory.""" + # add LocalAsyncSession + local_async_session = create_async_session( + container.resolve(async_sessionmaker[AsyncSession]) + ) + container.add_instance(local_async_session, AsyncSession) + + +def configure_database_session(container: Container, settings: Settings) -> Container: + """Configure database session setup for the application.""" + # add engine + db_logger.info(f"""====== Configuring database session. ====== + DB_ENGINE_URL: {settings.db_engine_url} + """) + engine = create_async_engine( + settings.db_engine_url, echo=True, poolclass=AsyncAdaptedQueuePool + ) + container.add_instance(engine, AsyncEngine) + + # add sessionmaker + container.add_singleton_by_factory( + SessionFactory(engine), async_sessionmaker[AsyncSession] + ) + + db_logger.info("====== Database session configured. ======") + return container diff --git a/xcov19/app/main.py b/xcov19/app/main.py index c1e2286..7da3d5e 100644 --- a/xcov19/app/main.py +++ b/xcov19/app/main.py @@ -3,8 +3,13 @@ """ from blacksheep import Application -from rodi import Container +from rodi import Container, ContainerProtocol +from xcov19.app.database import ( + configure_database_session, + setup_database, + start_db_session, +) from xcov19.app.auth import configure_authentication from xcov19.app.controllers import controller_router from xcov19.app.docs import configure_docs @@ -13,6 +18,8 @@ from xcov19.app.services import configure_services from xcov19.app.settings import load_settings, Settings +from sqlalchemy.ext.asyncio import AsyncEngine + def configure_application( services: Container, @@ -27,8 +34,18 @@ def configure_application( configure_authentication(app, settings) configure_middleware(app, origin_header_middleware) configure_docs(app, settings) - # TODO: Add configure_database(app, settings) + configure_database_session(services, settings) return app app = configure_application(*configure_services(load_settings())) + + +@app.on_start +async def on_start(): + container: 
ContainerProtocol = app.services + if not isinstance(container, Container): + raise ValueError("Container is not a valid container") + await start_db_session(container) + engine = container.resolve(AsyncEngine) + await setup_database(engine) diff --git a/xcov19/app/services.py b/xcov19/app/services.py index 81beaa8..3bb338b 100644 --- a/xcov19/app/services.py +++ b/xcov19/app/services.py @@ -14,6 +14,7 @@ from rodi import Container + from xcov19.app.settings import Settings from xcov19.services.geolocation import ( LocationQueryServiceInterface, diff --git a/xcov19/app/settings.py b/xcov19/app/settings.py index d48ce14..1cd9520 100644 --- a/xcov19/app/settings.py +++ b/xcov19/app/settings.py @@ -34,6 +34,8 @@ class Settings(BaseSettings): # export app_app='{"show_error_details": True}' app: App = App() + db_engine_url: str = "sqlite+aiosqlite:///" # "sqlite+aiosqlite:///xcov19.db" + model_config = SettingsConfigDict(env_prefix="APP_") diff --git a/xcov19/dev.py b/xcov19/dev.py index 0c9d1fe..6f2eade 100644 --- a/xcov19/dev.py +++ b/xcov19/dev.py @@ -44,6 +44,9 @@ config.bind = [f"0.0.0.0:{port}"] config.debug = True + config.accesslog = "-" + config.errorlog = "-" + config.use_reloader = True asyncio.run(serve(app, config)) From 916defee623a50a65b6fbc34068ee17e4791491c Mon Sep 17 00:00:00 2001 From: codecakes Date: Sat, 7 Sep 2024 22:54:49 +0530 Subject: [PATCH 11/27] refactored unit tests to have dummy functions move to conftest. fix their type annotaitons. added e2e api testing support in start_server and conftest. added unit test configuration setup in pyproject. 
ignore any db files in project --- .gitignore | 3 +- pyproject.toml | 6 ++ xcov19/tests/conftest.py | 61 +++++++++++++++- xcov19/tests/start_server.py | 13 ++++ xcov19/tests/test_geolocation_api.py | 37 ++++++++++ xcov19/tests/test_services.py | 105 ++++++++++++++++----------- 6 files changed, 179 insertions(+), 46 deletions(-) create mode 100644 xcov19/tests/start_server.py create mode 100644 xcov19/tests/test_geolocation_api.py diff --git a/.gitignore b/.gitignore index a25237b..fb8ed9d 100644 --- a/.gitignore +++ b/.gitignore @@ -52,10 +52,11 @@ coverage.xml *.mo *.pot -# Django stuff: +# Miscellaneuos configuration stuff: *.log local_settings.py db.sqlite3 +* .db # Flask stuff: instance/ diff --git a/pyproject.toml b/pyproject.toml index fe20046..b3e7118 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,12 @@ testpaths = [ "xcov19/tests", ] asyncio_mode = "auto" +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", + "integration: marks tests as integration tests", + "unit: marks tests as unit tests", + # Add more markers as needed +] [tool.pyright] pythonVersion = "3.12" diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index c7051d9..3b5150a 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -1,8 +1,10 @@ from collections.abc import Callable from typing import List +from blacksheep.testing import TestClient import pytest +from xcov19.tests.start_server import start_server from xcov19.dto import ( AnonymousId, GeoLocation, @@ -14,6 +16,10 @@ from xcov19.services.geolocation import LocationQueryServiceInterface from xcov19.utils.mixins import InterfaceProtocolCheckMixin +import random + +RANDOM_SEED = random.seed(1) + # Same as using @pytest.mark.anyio pytestmark = pytest.mark.anyio @@ -43,8 +49,43 @@ def dummy_geolocation_query_json(dummy_coordinates): @pytest.fixture(scope="class") -def stub_location_srvc(): - return StubLocationQueryServiceImpl +def dummy_reverse_geo_lookup_svc() -> 
Callable[[LocationQueryJSON], dict]: + def callback(query: LocationQueryJSON) -> dict: + return {} + + return callback + + +@pytest.fixture(scope="class") +def dummy_patient_query_lookup_svc_none() -> ( + Callable[[Address, LocationQueryJSON], list] +): + def callback(address: Address, query: LocationQueryJSON) -> list: + return [] + + return callback + + +@pytest.fixture(scope="class") +def dummy_patient_query_lookup_svc() -> Callable[[Address, LocationQueryJSON], list]: + def callback(address: Address, query: LocationQueryJSON) -> list: + return [ + FacilitiesResult( + name="Test facility", + address=Address(), + geolocation=GeoLocation(lat=0.0, lng=0.0), + contact="+919999999999", + facility_type="nursing", + ownership="charity", + specialties=["surgery", "pediatrics"], + stars=4, + reviews=120, + rank=random.randint(1, 20), + estimated_time=20, + ) + ] + + return callback class StubLocationQueryServiceImpl( @@ -82,3 +123,19 @@ async def fetch_facilities( estimated_time=20, ) ] + + +@pytest.fixture(scope="class") +def stub_location_srvc() -> LocationQueryServiceInterface: + return StubLocationQueryServiceImpl + + +@pytest.fixture(scope="function", name="client") +async def test_client() -> TestClient: + # Create a test client + app = await anext(start_server()) + return TestClient(app) + + +@pytest.fixture(scope="function", name="db_setup") +async def setup_database(): ... 
diff --git a/xcov19/tests/start_server.py b/xcov19/tests/start_server.py new file mode 100644 index 0000000..f1a7634 --- /dev/null +++ b/xcov19/tests/start_server.py @@ -0,0 +1,13 @@ +from collections.abc import AsyncGenerator +from xcov19.app.main import app +from blacksheep import Application + + +async def start_server() -> AsyncGenerator[Application, None]: + """Start a test server for automated testing.""" + try: + await app.start() + yield app + finally: + if app.started: + await app.stop() diff --git a/xcov19/tests/test_geolocation_api.py b/xcov19/tests/test_geolocation_api.py new file mode 100644 index 0000000..083b13e --- /dev/null +++ b/xcov19/tests/test_geolocation_api.py @@ -0,0 +1,37 @@ +import json +import pytest +from xcov19.dto import LocationQueryJSON, GeoLocation, AnonymousId, QueryId +from blacksheep import Content, Response + + +@pytest.mark.integration +@pytest.mark.usefixtures("client") +class TestGeolocationAPI: + async def test_location_query_endpoint(self, client): + # Prepare the request payload + location_query = LocationQueryJSON( + location=GeoLocation(lat=0, lng=0), + cust_id=AnonymousId(cust_id="test_cust_id"), + query_id=QueryId(query_id="test_query_id"), + ) + + # Send a POST request to the /geo endpoint + query = location_query.model_dump(round_trip=True) + binary_data = json.dumps(query).encode("utf-8") + print("binary data", binary_data, type(binary_data)) + response: Response = await client.post( + "/geo", + content=Content(b"application/json", binary_data), + # Add the required header + headers={ + "X-Origin-Match-Header": "secret", + }, + ) + + # The current implementation returns ok(), which is null in JSON + # response_text = await response.text() + # assert response_text.lower() == "resource not found" + # Assert the response + assert response.content_type() == b"text/plain; charset=utf-8" + # assert response.content == b'' + assert response.status == 200 diff --git a/xcov19/tests/test_services.py 
b/xcov19/tests/test_services.py index 3dafb04..5979b16 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,3 +1,4 @@ +from collections.abc import Callable from typing import List import pytest import unittest @@ -23,34 +24,6 @@ RANDOM_SEED = random.seed(1) -def dummy_reverse_geo_lookup_svc(query: LocationQueryJSON) -> dict: - return {} - - -def dummy_patient_query_lookup_svc_none( - address: Address, query: LocationQueryJSON -) -> list: - return [] - - -def dummy_patient_query_lookup_svc(address: Address, query: LocationQueryJSON) -> list: - return [ - FacilitiesResult( - name="Test facility", - address=Address(), - geolocation=GeoLocation(lat=0.0, lng=0.0), - contact="+919999999999", - facility_type="nursing", - ownership="charity", - specialties=["surgery", "pediatrics"], - stars=4, - reviews=120, - rank=random.randint(1, 20), - estimated_time=20, - ) - ] - - class DummyProviderRepo(IProviderRepository[Provider], InterfaceProtocolCheckMixin): def fetch_by_providers(self, **address: dict[str, str]) -> List[Provider]: return [ @@ -118,28 +91,37 @@ def stub_get_facilities_by_patient_query( return facilities_result -@pytest.mark.usefixtures("dummy_geolocation_query_json", "stub_location_srvc") +@pytest.mark.usefixtures( + "dummy_geolocation_query_json", + "dummy_reverse_geo_lookup_svc", + "dummy_patient_query_lookup_svc", + "stub_location_srvc", +) class GeoLocationServiceInterfaceTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) def autouse( self, dummy_geolocation_query_json: LocationQueryJSON, + dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + dummy_patient_query_lookup_svc: Callable[[Address, LocationQueryJSON], list], stub_location_srvc: LocationQueryServiceInterface, ): self.dummy_geolocation_query_json = dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc + self.dummy_patient_query_lookup_svc = dummy_patient_query_lookup_svc 
self.stub_location_srvc = stub_location_srvc async def test_resolve_coordinates(self): result = await self.stub_location_srvc.resolve_coordinates( - dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json ) self.assertEqual(Address(), result) async def test_fetch_facilities(self): result = await self.stub_location_srvc.fetch_facilities( - dummy_reverse_geo_lookup_svc, + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json, - dummy_patient_query_lookup_svc, + self.dummy_patient_query_lookup_svc, ) self.assertIsInstance(result, list) assert result @@ -148,24 +130,40 @@ async def test_fetch_facilities(self): ) -@pytest.mark.usefixtures("dummy_geolocation_query_json") +@pytest.mark.usefixtures( + "dummy_geolocation_query_json", + "dummy_reverse_geo_lookup_svc", + "dummy_patient_query_lookup_svc", + "dummy_patient_query_lookup_svc_none", +) class GeoLocationServiceTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) - def autouse(self, dummy_geolocation_query_json: LocationQueryJSON): + def autouse( + self, + dummy_geolocation_query_json: LocationQueryJSON, + dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + dummy_patient_query_lookup_svc: Callable[[Address, LocationQueryJSON], list], + dummy_patient_query_lookup_svc_none: Callable[ + [Address, LocationQueryJSON], list + ], + ): self.dummy_geolocation_query_json = dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc + self.dummy_patient_query_lookup_svc = dummy_patient_query_lookup_svc + self.dummy_patient_query_lookup_svc_none = dummy_patient_query_lookup_svc_none async def test_resolve_coordinates(self): result = await GeolocationQueryService.resolve_coordinates( - dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json ) expected = Address() self.assertEqual(expected, 
result, f"Got {result}, expected {expected}") async def test_fetch_facilities(self): result = await GeolocationQueryService.fetch_facilities( - dummy_reverse_geo_lookup_svc, + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json, - dummy_patient_query_lookup_svc, + self.dummy_patient_query_lookup_svc, ) self.assertIsNotNone(result) record = None @@ -175,23 +173,44 @@ async def test_fetch_facilities(self): async def test_fetch_facilities_no_results(self): result = await GeolocationQueryService.fetch_facilities( - dummy_reverse_geo_lookup_svc, + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json, - dummy_patient_query_lookup_svc_none, + self.dummy_patient_query_lookup_svc_none, ) self.assertIsNone(result) -# @pytest.mark.skip("WIP") -@pytest.mark.usefixtures("dummy_geolocation_query_json") +@pytest.mark.skip(reason="WIP") +@pytest.mark.integration +class GeoLocationServiceSqlRepoTest(unittest.IsolatedAsyncioTestCase): + """Test case for Sqlite Repository to test Geolocation Service. + + Before testing, ensure to: + 1. Setup Database + 2. For fetch_facilities, relevant services are configured. + 3. patient_query_lookup_svc is configured to call sqlite repository. + """ + + def setUp(self) -> None: + super().setUp() + + async def test_fetch_facilities(self): ... 
+ + +@pytest.mark.usefixtures("dummy_geolocation_query_json", "dummy_reverse_geo_lookup_svc") class PatientQueryLookupSvcTest(unittest.IsolatedAsyncioTestCase): @pytest.fixture(autouse=True) - def autouse(self, dummy_geolocation_query_json: LocationQueryJSON): + def autouse( + self, + dummy_geolocation_query_json: LocationQueryJSON, + dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + ): self.dummy_geolocation_query_json = dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc async def test_patient_query_lookup_svc(self): providers = await GeolocationQueryService.fetch_facilities( - dummy_reverse_geo_lookup_svc, + self.dummy_reverse_geo_lookup_svc, self.dummy_geolocation_query_json, stub_get_facilities_by_patient_query, ) From 22e14e30329f5ac606826e70fe6744e1dfd438ea Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 9 Sep 2024 16:07:47 +0530 Subject: [PATCH 12/27] refactors dummy tests, removed to conftest. make test works. added test-integration marker WIP. 
adds support for test markers --- Makefile | 5 ++++- pyproject.toml | 5 +++++ run.sh | 2 +- xcov19/app/settings.py | 12 ++++++++---- xcov19/tests/conftest.py | 16 +++++++--------- xcov19/tests/test_services.py | 35 +++++++++++++++++++++++++++++++---- 6 files changed, 56 insertions(+), 19 deletions(-) diff --git a/Makefile b/Makefile index 2c1fd79..b42d702 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,10 @@ pip-install: pip install --prefer-binary --use-pep517 --check-build-dependencies .[dev] test: - pytest -s xcov19/tests/ + APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "not integration" + +test-integration: + APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "integration" todos: @grep -rn "TODO:" xcov19/ --exclude-dir=node_modules --include="*.py" \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index b3e7118..051363b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,11 @@ markers = [ "unit: marks tests as unit tests", # Add more markers as needed ] +# Add env vars when running pytest +env = [ + "APP_ENV=test", + "APP_DB_ENGINE_URL=sqlite+aiosqlite://" +] [tool.pyright] pythonVersion = "3.12" diff --git a/run.sh b/run.sh index c9ea51b..e7ca90c 100755 --- a/run.sh +++ b/run.sh @@ -1,3 +1,3 @@ #!/bin/bash -poetry run python3 -m xcov19.dev \ No newline at end of file +APP_ENV=dev APP_DB_ENGINE_URL="sqlite+aiosqlite:///xcov19.db" poetry run python3 -m xcov19.dev \ No newline at end of file diff --git a/xcov19/app/settings.py b/xcov19/app/settings.py index 1cd9520..2059753 100644 --- a/xcov19/app/settings.py +++ b/xcov19/app/settings.py @@ -7,8 +7,9 @@ https://docs.pydantic.dev/latest/usage/settings/ """ +from typing import Annotated from blacksheep import FromHeader -from pydantic import BaseModel +from pydantic import BaseModel, Field from pydantic_settings import BaseSettings, SettingsConfigDict @@ -26,6 +27,8 @@ class Site(BaseModel): class 
Settings(BaseSettings): + db_engine_url: Annotated[str, "database connection string"] = Field(default=...) + # to override info: # export app_info='{"title": "x", "version": "0.0.2"}' info: APIInfo = APIInfo() @@ -34,13 +37,14 @@ class Settings(BaseSettings): # export app_app='{"show_error_details": True}' app: App = App() - db_engine_url: str = "sqlite+aiosqlite:///" # "sqlite+aiosqlite:///xcov19.db" - model_config = SettingsConfigDict(env_prefix="APP_") def load_settings() -> Settings: - return Settings() + settings = Settings() + if not settings.db_engine_url: + raise ValueError("Missing environment variable: APP_DB_ENGINE_URL") + return settings class FromOriginMatchHeader(FromHeader[str]): diff --git a/xcov19/tests/conftest.py b/xcov19/tests/conftest.py index 3b5150a..5026af0 100644 --- a/xcov19/tests/conftest.py +++ b/xcov19/tests/conftest.py @@ -4,7 +4,7 @@ from blacksheep.testing import TestClient import pytest -from xcov19.tests.start_server import start_server +from blacksheep import Application from xcov19.dto import ( AnonymousId, GeoLocation, @@ -35,12 +35,12 @@ def anyio_backend(request): @pytest.fixture(scope="class") -def dummy_coordinates(): +def dummy_coordinates() -> GeoLocation: return GeoLocation(lat=0, lng=0) @pytest.fixture(scope="class") -def dummy_geolocation_query_json(dummy_coordinates): +def dummy_geolocation_query_json(dummy_coordinates) -> LocationQueryJSON: return LocationQueryJSON( location=dummy_coordinates, cust_id=AnonymousId(cust_id="test_cust_id"), @@ -131,11 +131,9 @@ def stub_location_srvc() -> LocationQueryServiceInterface: @pytest.fixture(scope="function", name="client") -async def test_client() -> TestClient: +async def test_client(): # Create a test client - app = await anext(start_server()) - return TestClient(app) + async def start_client(app: Application) -> TestClient: + return TestClient(app) - -@pytest.fixture(scope="function", name="db_setup") -async def setup_database(): ... 
+ return start_client diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index 5979b16..d23b2c4 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -2,6 +2,9 @@ from typing import List import pytest import unittest + +from rodi import ContainerProtocol +from xcov19.tests.start_server import start_server from xcov19.domain.models.provider import ( Contact, FacilityEstablishment, @@ -21,6 +24,8 @@ import random +from sqlalchemy.ext.asyncio import AsyncSession + RANDOM_SEED = random.seed(1) @@ -182,7 +187,8 @@ async def test_fetch_facilities_no_results(self): @pytest.mark.skip(reason="WIP") @pytest.mark.integration -class GeoLocationServiceSqlRepoTest(unittest.IsolatedAsyncioTestCase): +@pytest.mark.usefixtures("dummy_reverse_geo_lookup_svc", "dummy_geolocation_query_json") +class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): """Test case for Sqlite Repository to test Geolocation Service. Before testing, ensure to: @@ -191,10 +197,31 @@ class GeoLocationServiceSqlRepoTest(unittest.IsolatedAsyncioTestCase): 3. patient_query_lookup_svc is configured to call sqlite repository. """ - def setUp(self) -> None: - super().setUp() + async def asyncSetUp(self) -> None: + app = await anext(start_server()) + self._container: ContainerProtocol = app.services + self._seed_db(self._container.resolve(AsyncSession)) + await super().asyncSetUp() + + def _seed_db(self, session: AsyncSession) -> None: + # TODO: add data to sqlite tables based on dummy_geolocation_query_json + # and add providers data. + ... - async def test_fetch_facilities(self): ... + def _patient_query_lookup_svc_using_repo( + self, address: Address, query: LocationQueryJSON + ) -> Callable[[Address, LocationQueryJSON], List[FacilitiesResult]]: ... 
+ + async def test_fetch_facilities( + self, dummy_reverse_geo_lookup_svc, dummy_geolocation_query_json + ): + # TODO Implement test_fetch_facilities like this: + # providers = await GeolocationQueryService.fetch_facilities( + # dummy_reverse_geo_lookup_svc, + # dummy_geolocation_query_json, + # self._patient_query_lookup_svc_using_repo + # ) + ... @pytest.mark.usefixtures("dummy_geolocation_query_json", "dummy_reverse_geo_lookup_svc") From 90cb9436c80aa6ad558125fa13de915930cd6a43 Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 9 Sep 2024 16:15:33 +0530 Subject: [PATCH 13/27] fix typo --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index fb8ed9d..78d5341 100644 --- a/.gitignore +++ b/.gitignore @@ -56,7 +56,7 @@ coverage.xml *.log local_settings.py db.sqlite3 -* .db +*.db # Flask stuff: instance/ From a180a723a936d0f7401519de0097b8fdce6800a0 Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 9 Sep 2024 16:24:54 +0530 Subject: [PATCH 14/27] updated how tests are run to include make test which sets necessary env vars --- .github/workflows/stage_ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/stage_ci.yml b/.github/workflows/stage_ci.yml index 44965ca..7093d3e 100644 --- a/.github/workflows/stage_ci.yml +++ b/.github/workflows/stage_ci.yml @@ -23,8 +23,7 @@ jobs: uses: ./.github/actions - name: Run tests - run: | - poetry run pytest + run: make test pre-commit: runs-on: ubuntu-latest From 5d7982416b060ad2d03bd731cd54ac6b684c2009 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 10 Sep 2024 03:07:54 +0530 Subject: [PATCH 15/27] added changes to support api and integration tests --- Makefile | 2 +- pyproject.toml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index b42d702..00ce2a1 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,7 @@ pip-install: pip install --prefer-binary --use-pep517 --check-build-dependencies .[dev] 
test: - APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "not integration" + APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "not slow and not integration and not api" test-integration: APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "integration" diff --git a/pyproject.toml b/pyproject.toml index 516ea05..d40d05c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ asyncio_mode = "auto" markers = [ "slow: marks tests as slow (deselect with '-m \"not slow\"')", "integration: marks tests as integration tests", + "api: mark api tests", "unit: marks tests as unit tests", # Add more markers as needed ] From c576c4311f3387f88a1e6a0da92745831bbabfd9 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 10 Sep 2024 03:14:21 +0530 Subject: [PATCH 16/27] added sqlite models to support sqlite database. database.py enhances app level infra setup using DI using rodi's Container that sets up database and session --- xcov19/app/database.py | 41 ++++++------ xcov19/infra/models.py | 138 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 158 insertions(+), 21 deletions(-) create mode 100644 xcov19/infra/models.py diff --git a/xcov19/app/database.py b/xcov19/app/database.py index 6e8c07f..08879a9 100644 --- a/xcov19/app/database.py +++ b/xcov19/app/database.py @@ -1,13 +1,14 @@ from collections.abc import AsyncGenerator +from contextlib import asynccontextmanager import sys from rodi import Container -from sqlmodel import SQLModel - +from xcov19.infra.models import SQLModel +from sqlmodel import text from xcov19.app.settings import Settings +from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper from sqlalchemy.ext.asyncio import ( create_async_engine, AsyncEngine, - AsyncSession, async_sessionmaker, ) @@ -38,36 +39,33 @@ class SessionFactory: def __init__(self, engine: AsyncEngine): self._engine = engine - def __call__(self) -> 
async_sessionmaker[AsyncSession]: + def __call__(self) -> async_sessionmaker[AsyncSessionWrapper]: return async_sessionmaker( - self._engine, class_=AsyncSession, expire_on_commit=False + self._engine, class_=AsyncSessionWrapper, expire_on_commit=False ) async def setup_database(engine: AsyncEngine) -> None: """Sets up tables for database.""" async with engine.begin() as conn: + # see: https://sqlmodel.tiangolo.com/tutorial/relationship-attributes/cascade-delete-relationships/#enable-foreign-key-support-in-sqlite + await conn.execute(text("PRAGMA foreign_keys=ON")) await conn.run_sync(SQLModel.metadata.create_all) + await conn.commit() + db_logger.info("===== Database tables setup. =====") -async def create_async_session( - AsyncSessionFactory: async_sessionmaker[AsyncSession], -) -> AsyncGenerator[AsyncSession, None]: - """Create an asynchronous database session.""" - async with AsyncSessionFactory() as session: - try: - yield session - finally: - await session.close() - - -async def start_db_session(container: Container): +@asynccontextmanager +async def start_db_session( + container: Container, +) -> AsyncGenerator[AsyncSessionWrapper, None]: """Starts a new database session given SessionFactory.""" # add LocalAsyncSession - local_async_session = create_async_session( - container.resolve(async_sessionmaker[AsyncSession]) + async_session_factory: async_sessionmaker[AsyncSessionWrapper] = container.resolve( + async_sessionmaker[AsyncSessionWrapper] ) - container.add_instance(local_async_session, AsyncSession) + async with async_session_factory() as local_async_session: + yield local_async_session def configure_database_session(container: Container, settings: Settings) -> Container: @@ -82,8 +80,9 @@ def configure_database_session(container: Container, settings: Settings) -> Cont container.add_instance(engine, AsyncEngine) # add sessionmaker + session_factory = SessionFactory(engine) container.add_singleton_by_factory( - SessionFactory(engine), 
async_sessionmaker[AsyncSession] + session_factory, async_sessionmaker[AsyncSessionWrapper] ) db_logger.info("====== Database session configured. ======") diff --git a/xcov19/infra/models.py b/xcov19/infra/models.py new file mode 100644 index 0000000..2e813a0 --- /dev/null +++ b/xcov19/infra/models.py @@ -0,0 +1,138 @@ +""" +Database Models and Delete Behavior Design Principles + +1. Query-Patient-Location Relationship: + - Every Query must have both a Patient and a Location associated with it. + - A Patient can have multiple Queries. + - A Location can be associated with multiple Queries. + +2. Delete Restrictions: + - Patient and Location records cannot be deleted if there are any Queries referencing them. + - This is enforced by the "RESTRICT" ondelete option in the Query model's foreign keys. + +3. Orphan Deletion: + - A Patient or Location should be deleted only when there are no more Queries referencing it. + - This is handled by custom event listeners that check for remaining Queries after a Query deletion. + +4. Cascading Behavior: + - There is no automatic cascading delete from Patient or Location to Query. + - Queries must be explicitly deleted before their associated Patient or Location can be removed. + +5. Transaction Handling: + - Delete operations and subsequent orphan checks should occur within the same transaction. + - Event listeners use the existing database connection to ensure consistency with the main transaction. + +6. Error Handling: + - Errors during the orphan deletion process should not silently fail. + - Exceptions in event listeners are logged and re-raised to ensure proper transaction rollback. + +7. Data Integrity: + - Database-level constraints (foreign keys, unique constraints) are used in conjunction with SQLAlchemy model definitions to ensure data integrity. + +These principles aim to maintain referential integrity while allowing for the cleanup of orphaned Patient and Location records when appropriate. 
+""" + +from __future__ import annotations + +from typing import List +from sqlmodel import SQLModel, Field, Relationship +from sqlalchemy import Column, Text, Float, Index +from sqlalchemy.orm import relationship, Mapped +import uuid +from sqlalchemy.dialects.sqlite import TEXT + + +class Patient(SQLModel, table=True): + patient_id: str = Field( + sa_column=Column( + TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) + ), + allow_mutation=False, + ) + queries: Mapped[List["Query"]] = Relationship( + # back_populates="patient", + passive_deletes="all", + cascade_delete=True, + sa_relationship=relationship(back_populates="patient"), + ) + + +class Query(SQLModel, table=True): + """Every Query must have both a Patient and a Location.""" + + query_id: str = Field( + sa_column=Column( + TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) + ), + allow_mutation=False, + ) + query: str = Field(allow_mutation=False, sa_column=Column(Text)) + # Restrict deleting Patient record when there is atleast 1 query referencing it + patient_id: str = Field(foreign_key="patient.patient_id", ondelete="RESTRICT") + # Restrict deleting Location record when there is atleast 1 query referencing it + location_id: str = Field(foreign_key="location.location_id", ondelete="RESTRICT") + location: Location = Relationship(back_populates="queries") + patient: Patient = Relationship(back_populates="queries") + + +class Location(SQLModel, table=True): + __table_args__ = ( + Index("ix_location_composite_lat_lng", "latitude", "longitude", unique=True), + ) + location_id: str = Field( + sa_column=Column( + TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) + ), + allow_mutation=False, + ) + latitude: float = Field(sa_column=Column(Float)) + longitude: float = Field(sa_column=Column(Float)) + queries: Mapped[List["Query"]] = Relationship( + # back_populates="location", + cascade_delete=True, + passive_deletes=True, + 
sa_relationship=relationship(back_populates="location"), + ) + + +# TODO: Define Provider SQL model fields +# class Provider(SQLModel, table=True): +# # TODO: Compare with Github issue, domain model and noccodb +# ... + + +# TODO: Add Model events for database ops during testing +# @event.listens_for(Query, "after_delete") +# def delete_dangling_location(mapper: Mapper, connection: Engine, target: Query): +# """Deletes orphan Location when no related queries exist.""" +# local_session = sessionmaker(connection) +# with local_session() as session: +# stmt = ( +# select(func.count()) +# .select_from(Query) +# .where(Query.location_id == target.location_id) +# ) +# if ( +# num_queries := session.execute(stmt).scalar_one_or_none() +# ) and num_queries <= 1: +# location: Location = session.get(Location, target.location_id) +# session.delete(location) +# session.flush() + + +# @event.listens_for(Query, "after_delete") +# def delete_dangling_patient(mapper: Mapper, connection: Engine, target: Query): +# """Deletes orphan Patient records when no related queries exist.""" +# local_session = sessionmaker(connection) +# with local_session() as session: +# stmt = ( +# select(func.count()) +# .select_from(Query) +# .where(Query.patient_id == target.patient_id) +# ) +# if ( +# num_queries := session.execute(stmt).scalar_one_or_none() +# ) and num_queries <= 1: +# patient: Patient = session.get(Patient, target.patient_id) +# session.delete(patient) +# session.flush() From fca13b0fe09b5a8285eb00cb82866c2c0a3eeb95 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 10 Sep 2024 03:15:29 +0530 Subject: [PATCH 17/27] removed obsolete trigger functions --- xcov19/app/main.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/xcov19/app/main.py b/xcov19/app/main.py index 7da3d5e..41001fb 100644 --- a/xcov19/app/main.py +++ b/xcov19/app/main.py @@ -8,7 +8,6 @@ from xcov19.app.database import ( configure_database_session, setup_database, - start_db_session, ) from xcov19.app.auth import 
configure_authentication from xcov19.app.controllers import controller_router @@ -46,6 +45,5 @@ async def on_start(): container: ContainerProtocol = app.services if not isinstance(container, Container): raise ValueError("Container is not a valid container") - await start_db_session(container) engine = container.resolve(AsyncEngine) await setup_database(engine) From c8f23f9469c9ce1005d1597667b66873131fef02 Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 10 Sep 2024 03:24:40 +0530 Subject: [PATCH 18/27] major rewrite of test services for integration test for Sqlite Repo DB. refactored code to use container to setup database using start_test_database and asyncSetUp. WIP: test_fetch_facilities --- xcov19/infra/__init__.py | 0 xcov19/tests/data/__init__.py | 0 xcov19/tests/data/seed_db.py | 43 +++++++++++++++++++++++++++++++++++ xcov19/tests/start_server.py | 18 +++++++++++++-- xcov19/tests/test_services.py | 38 +++++++++++++++++++------------ 5 files changed, 83 insertions(+), 16 deletions(-) create mode 100644 xcov19/infra/__init__.py create mode 100644 xcov19/tests/data/__init__.py create mode 100644 xcov19/tests/data/seed_db.py diff --git a/xcov19/infra/__init__.py b/xcov19/infra/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/xcov19/tests/data/__init__.py b/xcov19/tests/data/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/xcov19/tests/data/seed_db.py b/xcov19/tests/data/seed_db.py new file mode 100644 index 0000000..0c451c2 --- /dev/null +++ b/xcov19/tests/data/seed_db.py @@ -0,0 +1,43 @@ +"""Dummy data to seed to database models. +Mapped to SQLModel. 
+ +dummy GeoLocation: +lat=0 +lng=0 + +cust_id=test_cust_id +query_id=test_query_id +""" + +from sqlalchemy import ScalarResult +from sqlmodel import select +from xcov19.infra.models import Patient, Query, Location +from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper + + +async def seed_data(session: AsyncSessionWrapper): + """ + Now you can do: + res = await self._session.exec(select(Query)) + query = res.first() + print("query", query) + res = await self._session.exec(select(Patient).where(Patient.queries.any(Query.query_id == query.query_id))) + print("patient", res.first()) + res = await self._session.exec(select(Location).where(Location.queries.any(Query.query_id == query.query_id))) + print("location", res.first()) + """ + query = Query( + query=""" + Runny nose and high fever suddenly lasting for few hours. + Started yesterday. + """ + ) # type: ignore + + patient = Patient(queries=[query]) # type: ignore + + patient_location = Location(latitude=0, longitude=0, queries=[query]) # type: ignore + session.add_all([patient_location, patient]) + await session.commit() + query_result: ScalarResult = await session.exec(select(Query)) + if not query_result.first(): + raise RuntimeError("Database seeding failed") diff --git a/xcov19/tests/start_server.py b/xcov19/tests/start_server.py index f1a7634..697d687 100644 --- a/xcov19/tests/start_server.py +++ b/xcov19/tests/start_server.py @@ -1,9 +1,14 @@ from collections.abc import AsyncGenerator -from xcov19.app.main import app from blacksheep import Application +from contextlib import asynccontextmanager +from rodi import Container, ContainerProtocol +from xcov19.app.database import configure_database_session, setup_database +from xcov19.app.settings import load_settings +from sqlalchemy.ext.asyncio import AsyncEngine -async def start_server() -> AsyncGenerator[Application, None]: +@asynccontextmanager +async def start_server(app: Application) -> AsyncGenerator[Application, None]: """Start a 
test server for automated testing.""" try: await app.start() @@ -11,3 +16,12 @@ async def start_server() -> AsyncGenerator[Application, None]: finally: if app.started: await app.stop() + + +async def start_test_database(container: ContainerProtocol) -> None: + """Database setup for integration tests.""" + if not isinstance(container, Container): + raise RuntimeError("container not of type Container.") + configure_database_session(container, load_settings()) + engine = container.resolve(AsyncEngine) + await setup_database(engine) diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index d23b2c4..ff99c7b 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,10 +1,13 @@ from collections.abc import Callable +from contextlib import AsyncExitStack from typing import List import pytest import unittest -from rodi import ContainerProtocol -from xcov19.tests.start_server import start_server +from rodi import Container, ContainerProtocol +from xcov19.app.database import start_db_session +from xcov19.tests.data.seed_db import seed_data +from xcov19.tests.start_server import start_test_database from xcov19.domain.models.provider import ( Contact, FacilityEstablishment, @@ -24,7 +27,8 @@ import random -from sqlalchemy.ext.asyncio import AsyncSession +from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper + RANDOM_SEED = random.seed(1) @@ -185,7 +189,7 @@ async def test_fetch_facilities_no_results(self): self.assertIsNone(result) -@pytest.mark.skip(reason="WIP") +# @pytest.mark.skip(reason="WIP") @pytest.mark.integration @pytest.mark.usefixtures("dummy_reverse_geo_lookup_svc", "dummy_geolocation_query_json") class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): @@ -198,23 +202,29 @@ class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): """ async def asyncSetUp(self) -> None: - app = await anext(start_server()) - self._container: ContainerProtocol = app.services - 
self._seed_db(self._container.resolve(AsyncSession)) + self._stack = AsyncExitStack() + container: ContainerProtocol = Container() + await start_test_database(container) + self._session = await self._stack.enter_async_context( + start_db_session(container) + ) + if not isinstance(self._session, AsyncSessionWrapper): + raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + await seed_data(self._session) await super().asyncSetUp() - def _seed_db(self, session: AsyncSession) -> None: - # TODO: add data to sqlite tables based on dummy_geolocation_query_json - # and add providers data. - ... + async def asyncTearDown(self) -> None: + print("async closing test server db session closing.") + await self._session.commit() + await self._stack.aclose() + print("async test server closing.") + await super().asyncTearDown() def _patient_query_lookup_svc_using_repo( self, address: Address, query: LocationQueryJSON ) -> Callable[[Address, LocationQueryJSON], List[FacilitiesResult]]: ... 
- async def test_fetch_facilities( - self, dummy_reverse_geo_lookup_svc, dummy_geolocation_query_json - ): + async def test_fetch_facilities(self): # TODO Implement test_fetch_facilities like this: # providers = await GeolocationQueryService.fetch_facilities( # dummy_reverse_geo_lookup_svc, From 5ddf9ade81114703d10861d42cd654f1ae1a8dad Mon Sep 17 00:00:00 2001 From: codecakes Date: Wed, 11 Sep 2024 21:13:13 +0530 Subject: [PATCH 19/27] refactored into setUpTestDatabase class for DRY --- xcov19/tests/start_server.py | 49 ++++++++++++++++++++++------ xcov19/tests/test_geolocation_api.py | 2 +- xcov19/tests/test_services.py | 35 +++++++++----------- 3 files changed, 56 insertions(+), 30 deletions(-) diff --git a/xcov19/tests/start_server.py b/xcov19/tests/start_server.py index 697d687..5dae5db 100644 --- a/xcov19/tests/start_server.py +++ b/xcov19/tests/start_server.py @@ -1,10 +1,15 @@ from collections.abc import AsyncGenerator from blacksheep import Application -from contextlib import asynccontextmanager +from contextlib import AsyncExitStack, asynccontextmanager from rodi import Container, ContainerProtocol -from xcov19.app.database import configure_database_session, setup_database +from xcov19.app.database import ( + configure_database_session, + setup_database, + start_db_session, +) from xcov19.app.settings import load_settings -from sqlalchemy.ext.asyncio import AsyncEngine +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession +from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper @asynccontextmanager @@ -18,10 +23,34 @@ async def start_server(app: Application) -> AsyncGenerator[Application, None]: await app.stop() -async def start_test_database(container: ContainerProtocol) -> None: - """Database setup for integration tests.""" - if not isinstance(container, Container): - raise RuntimeError("container not of type Container.") - configure_database_session(container, load_settings()) - engine = container.resolve(AsyncEngine) - 
await setup_database(engine) +class setUpTestDatabase: + def __init__(self) -> None: + self._stack = AsyncExitStack() + self._session: AsyncSession | AsyncSessionWrapper | None = None + self._container: ContainerProtocol = Container() + + async def setup_test_database(self) -> None: + """Database setup for integration tests.""" + if not isinstance(self._container, Container): + raise RuntimeError("container not of type Container.") + configure_database_session(self._container, load_settings()) + engine = self._container.resolve(AsyncEngine) + await setup_database(engine) + + async def start_async_session(self) -> AsyncSession | AsyncSessionWrapper: + if not isinstance(self._container, Container): + raise RuntimeError("container not of type Container.") + self._session = await self._stack.enter_async_context( + start_db_session(self._container) + ) + if not isinstance(self._session, AsyncSessionWrapper): + raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + return self._session + + async def aclose(self) -> None: + print("async closing test server db session closing.") + if not isinstance(self._session, AsyncSessionWrapper): + raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + await self._session.commit() + await self._stack.aclose() + print("async test server closing.") diff --git a/xcov19/tests/test_geolocation_api.py b/xcov19/tests/test_geolocation_api.py index 083b13e..22d0421 100644 --- a/xcov19/tests/test_geolocation_api.py +++ b/xcov19/tests/test_geolocation_api.py @@ -4,7 +4,7 @@ from blacksheep import Content, Response -@pytest.mark.integration +@pytest.mark.api @pytest.mark.usefixtures("client") class TestGeolocationAPI: async def test_location_query_endpoint(self, client): diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index ff99c7b..b279daf 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,13 +1,12 @@ from collections.abc import Callable 
-from contextlib import AsyncExitStack from typing import List import pytest import unittest +import random + -from rodi import Container, ContainerProtocol -from xcov19.app.database import start_db_session from xcov19.tests.data.seed_db import seed_data -from xcov19.tests.start_server import start_test_database +from xcov19.tests.start_server import setUpTestDatabase from xcov19.domain.models.provider import ( Contact, FacilityEstablishment, @@ -24,9 +23,6 @@ from xcov19.utils.mixins import InterfaceProtocolCheckMixin - -import random - from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper @@ -202,22 +198,23 @@ class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): """ async def asyncSetUp(self) -> None: - self._stack = AsyncExitStack() - container: ContainerProtocol = Container() - await start_test_database(container) - self._session = await self._stack.enter_async_context( - start_db_session(container) - ) - if not isinstance(self._session, AsyncSessionWrapper): - raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + # self._stack = AsyncExitStack() + # container: ContainerProtocol = Container() + # await start_test_database(container) + # self._session = await self._stack.enter_async_context( + # start_db_session(container) + # ) + # if not isinstance(self._session, AsyncSessionWrapper): + # raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + self._test_db = setUpTestDatabase() + await self._test_db.setup_test_database() + self._session = await self._test_db.start_async_session() + assert isinstance(self._session, AsyncSessionWrapper) await seed_data(self._session) await super().asyncSetUp() async def asyncTearDown(self) -> None: - print("async closing test server db session closing.") - await self._session.commit() - await self._stack.aclose() - print("async test server closing.") + await self._test_db.aclose() await super().asyncTearDown() def 
_patient_query_lookup_svc_using_repo( From 3d15a1fbb111ed77fa493d111c8eb54fbb2fad4d Mon Sep 17 00:00:00 2001 From: codecakes Date: Wed, 11 Sep 2024 21:24:25 +0530 Subject: [PATCH 20/27] WIP test_fetch_facilities integration test --- xcov19/tests/test_services.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index b279daf..cbcc39a 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -197,15 +197,16 @@ class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): 3. patient_query_lookup_svc is configured to call sqlite repository. """ + @pytest.fixture(autouse=True) + def autouse( + self, + dummy_geolocation_query_json: LocationQueryJSON, + dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], + ): + self.dummy_geolocation_query_json = dummy_geolocation_query_json + self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc + async def asyncSetUp(self) -> None: - # self._stack = AsyncExitStack() - # container: ContainerProtocol = Container() - # await start_test_database(container) - # self._session = await self._stack.enter_async_context( - # start_db_session(container) - # ) - # if not isinstance(self._session, AsyncSessionWrapper): - # raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") self._test_db = setUpTestDatabase() await self._test_db.setup_test_database() self._session = await self._test_db.start_async_session() @@ -219,16 +220,18 @@ async def asyncTearDown(self) -> None: def _patient_query_lookup_svc_using_repo( self, address: Address, query: LocationQueryJSON - ) -> Callable[[Address, LocationQueryJSON], List[FacilitiesResult]]: ... + ) -> List[FacilitiesResult]: ... 
async def test_fetch_facilities(self): # TODO Implement test_fetch_facilities like this: - # providers = await GeolocationQueryService.fetch_facilities( - # dummy_reverse_geo_lookup_svc, - # dummy_geolocation_query_json, - # self._patient_query_lookup_svc_using_repo - # ) - ... + providers = await GeolocationQueryService.fetch_facilities( + self.dummy_reverse_geo_lookup_svc, + self.dummy_geolocation_query_json, + self._patient_query_lookup_svc_using_repo, + ) + assert providers + self.assertIsInstance(providers, list) + self.assertIs(len(providers), 1) @pytest.mark.usefixtures("dummy_geolocation_query_json", "dummy_reverse_geo_lookup_svc") From 87265fd1d80355beea8d677ad16b16291c82f1ba Mon Sep 17 00:00:00 2001 From: codecakes Date: Tue, 10 Sep 2024 03:35:04 +0530 Subject: [PATCH 21/27] Feature/sqlite repo: setup for GeoLocationServiceSqlRepoDBTest (#68) Unstable, active and WIP: This is where things start taking shape for setting test coverage `GeoLocationServiceSqlRepoDBTest` to implement issue: #26 ; - major rewrite of test services for integration test for Sqlite Repo DB. refactored code to use container to setup database using start_test_database and asyncSetUp. WIP: test_fetch_facilities - added sqlite models to support sqlite database. 
database.py enhances app level infra setup using DI using rodi's Container that sets up database and session --- xcov19/tests/test_services.py | 1 + 1 file changed, 1 insertion(+) diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index cbcc39a..5df58d0 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -1,4 +1,5 @@ from collections.abc import Callable +from contextlib import AsyncExitStack from typing import List import pytest import unittest From 0dd37fb5181b99a590aeb22b797e61c1d5028e79 Mon Sep 17 00:00:00 2001 From: codecakes Date: Fri, 13 Sep 2024 14:48:26 +0530 Subject: [PATCH 22/27] added docstrings, TODO explainer and specific exception --- xcov19/tests/start_server.py | 26 ++++++++++++++++++++++---- xcov19/tests/test_services.py | 5 ++++- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/xcov19/tests/start_server.py b/xcov19/tests/start_server.py index 5dae5db..4ee8768 100644 --- a/xcov19/tests/start_server.py +++ b/xcov19/tests/start_server.py @@ -12,6 +12,18 @@ from sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper +class InvalidSessionTypeError(RuntimeError): + """Exception raised when the session is not of the expected type.""" + + pass + + +class InvalidDIContainerTypeError(RuntimeError): + """Exception raised when valid DI container not found.""" + + pass + + @asynccontextmanager async def start_server(app: Application) -> AsyncGenerator[Application, None]: """Start a test server for automated testing.""" @@ -24,6 +36,8 @@ async def start_server(app: Application) -> AsyncGenerator[Application, None]: class setUpTestDatabase: + """Manages the lifecycle of the test database.""" + def __init__(self) -> None: self._stack = AsyncExitStack() self._session: AsyncSession | AsyncSessionWrapper | None = None @@ -32,25 +46,29 @@ def __init__(self) -> None: async def setup_test_database(self) -> None: """Database setup for integration tests.""" if not 
isinstance(self._container, Container): - raise RuntimeError("container not of type Container.") + raise InvalidDIContainerTypeError("Container not of valid type.") configure_database_session(self._container, load_settings()) engine = self._container.resolve(AsyncEngine) await setup_database(engine) async def start_async_session(self) -> AsyncSession | AsyncSessionWrapper: if not isinstance(self._container, Container): - raise RuntimeError("container not of type Container.") + raise InvalidDIContainerTypeError("Container not of valid type.") self._session = await self._stack.enter_async_context( start_db_session(self._container) ) if not isinstance(self._session, AsyncSessionWrapper): - raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + raise InvalidSessionTypeError( + f"{self._session} is not a AsyncSessionWrapper value." + ) return self._session async def aclose(self) -> None: print("async closing test server db session closing.") if not isinstance(self._session, AsyncSessionWrapper): - raise RuntimeError(f"{self._session} is not a AsyncSessionWrapper value.") + raise InvalidSessionTypeError( + f"{self._session} is not a AsyncSessionWrapper value." + ) await self._session.commit() await self._stack.aclose() print("async test server closing.") diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index cbcc39a..f3de758 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -220,7 +220,10 @@ async def asyncTearDown(self) -> None: def _patient_query_lookup_svc_using_repo( self, address: Address, query: LocationQueryJSON - ) -> List[FacilitiesResult]: ... + ) -> List[FacilitiesResult]: + # TODO: Implement a patient query lookup service + # that returns type List[FacilitiesResult] + ... 
async def test_fetch_facilities(self): # TODO Implement test_fetch_facilities like this: From 938b62f127fb5085950761d00948cf16f6bc252c Mon Sep 17 00:00:00 2001 From: JiyaGupta-cs <140608790+JiyaGupta-cs@users.noreply.github.com> Date: Wed, 18 Sep 2024 02:19:53 +0530 Subject: [PATCH 23/27] Fix: Removed commented out line in pyproject.toml and setup project in local (#74) ## Description - Remove the commented out line - Ensure project is setup and runs successfully on your local I have uncommented the line instead of removing that to install them as dev dependencies Fixes #52 --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 845b56d..4e4d5fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,8 +20,6 @@ alembic = "^1.13.2" aiosqlite = "^0.20.0" sqlmodel = {version="^0.0.22"} -# [tool.poetry.group.dev.dependencies] - ruff = { version = "^0.6.3", optional = true } mypy = { version = "^1.11.2", optional = true } blacksheep-cli = { version = "^0.0.4", optional = true } From 210d0c6d20954cadc129dd5c1e14895f8d6c88a7 Mon Sep 17 00:00:00 2001 From: codecakes Date: Fri, 20 Sep 2024 17:07:48 +0530 Subject: [PATCH 24/27] updated promotion product messaging --- README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/README.md b/README.md index e408932..189de6c 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,8 @@ Project Healthcare, from hereon called the brokering service, is a set of upstre 2. Exposing geolocation API to caputure patient's location and offer nearby facilities to take action on based on their diagnosis and location. ## Extensible and Open + +Unlock advanced healthcare integration without the enterprise price tag. Connect systems, personalize care, and reach underserved communities with ease. 
Built using modified project template for [BlackSheep](https://github.com/Neoteroi/BlackSheep) web framework to start Web APIs, the project structure adheres blacksheep's domain and infrastructure segregation philosophy. It naturally fits the domain driven design philosophy using ports and adapters pattern so expect slight shift towards domain models and services structure. @@ -44,6 +46,24 @@ The specification follows a sandwich service model i.e. it requires one or more 2. The brokering service stores transient diagnosis request and enqueues them to upstream provider service that should return records of facilities and their specialties based on the diagnosis. 3. The brokering service returns the records of matching facilities to the downstream consumer service. +## Revolutionizing Healthcare Integration for Communities + +All-in-one, affordable, and scalable health integration platform tailored for small to medium-sized businesses (SMBs) and rural healthcare providing Advanced Integration, Personalized Care and Scalable Solutions. + +| **Feature** | **Healthcare Infrastructure Service** | **Competitors** | +|--------------------------------------------------|------------|--------------------------------| +| **Remote Patient Monitoring** | ✔ | Limited/None | +| **EHR/EMR Integration** | ✔ | ✔ (Enterprise solutions) | +| **Data Normalization, Anonymization and Aggregation** | ✔ | Limited/None | +| **Rural Healthcare First** | ✔ | ✔ (Limited focus) | +| **Personalized Patient Journeys and Engagement** | ✔ | Limited/None | +| **Developer-Friendly APIs** | ✔ | ✔ (Enterprise solutions only) | +| **Cost** | Freemium to Affordable | High Enterprise Pricing | + +### What Sets Us Apart? + +Unlike other platforms, HIS combines advanced features in one solution, delivering exceptional healthcare services without the complexity or high costs. 
+ ## Getting started ### For Linux and Mac From e19185b4feffca5e4cdd5a47dff4612d22f1b56a Mon Sep 17 00:00:00 2001 From: codecakes Date: Sat, 21 Sep 2024 01:19:42 +0530 Subject: [PATCH 25/27] adds docker support for integration testing and running containerized service (#75) resolves #73 adds: - Dockerfile.build: This is the base multi-stage image. - Dockerfile: Main image to run the service using docker compose. - Dockerfile.test-integration: For running integration tests. ## Summary by Sourcery Add Docker support for integration testing and running the service in a containerized environment. Update documentation to guide developers on setting up and using Docker for local testing and service emulation. Introduce Dockerfiles for building base, main, and integration test images, and add a Docker Compose configuration for service deployment. New Features: - Introduce Docker support for integration testing and running the service in a containerized environment using Docker Compose. Enhancements: - Update CONTRIBUTING.md and README.md to include instructions for setting up and running the containerized application, enhancing the documentation for developers. Build: - Add Dockerfile.build for creating a base multi-stage image, Dockerfile for the main service image, and Dockerfile.test-integration for integration testing. Deployment: - Add docker-compose.yml to define the containerized service setup, including shared configuration for ports, volumes, and logging. Documentation: - Enhance user-facing documentation in CONTRIBUTING.md and README.md to guide developers on using Docker for local integration testing and service emulation. 
--- .github/workflows/stage_ci.yml | 3 +- CONTRIBUTING.md | 43 ++++++++++-- Dockerfile | 11 +++ Dockerfile.build | 120 +++++++++++++++++++++++++++++++++ Dockerfile.test-integration | 16 +++++ Makefile | 17 ++++- README.md | 2 + docker-compose.yml | 17 +++++ pyproject.toml | 2 +- run.sh | 2 +- 10 files changed, 225 insertions(+), 8 deletions(-) create mode 100644 Dockerfile create mode 100644 Dockerfile.build create mode 100644 Dockerfile.test-integration create mode 100644 docker-compose.yml diff --git a/.github/workflows/stage_ci.yml b/.github/workflows/stage_ci.yml index 7093d3e..cd00b87 100644 --- a/.github/workflows/stage_ci.yml +++ b/.github/workflows/stage_ci.yml @@ -36,5 +36,6 @@ jobs: - name: Run Pre-commit run: | + if [ -f "poetry.lock" ]; then echo "running poetry lock" && poetry lock --no-update; fi; poetry install --no-root --all-extras - poetry run pre-commit run --all-files \ No newline at end of file + poetry run pre-commit run --all-files; \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 491bbd6..1223dba 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -19,6 +19,9 @@ Change directories into the newly cloned project-healthcare folder: `cd project-healthcare` ## Setting Up Your Development Environment + +When you are developing features or running unit-tests, the local setup prescribed here without container setup will work fine. However, it is recommended to setup the containers as well for local integration testing and running the service in a container to emulate staging or production environment. + ### - Install Dependencies: #### Set Up the Development Environment @@ -37,6 +40,42 @@ Use this command to set up automatic checks that will help catch errors in your Set up your preferred code editor or IDE for a smooth development experience. Consider installing extensions or plugins for syntax highlighting, code completion, and debugging specific to the programming languages used in the project. 
+### - Running & Testing Containerized Setup: + +`Makefile` contains actual script for setup. `docker` is replaceable with `podman` as a drop-in if you use Podman. + +All images use the base image from `Dockerfile.build` for multi-stage builds. + +#### Set Up Container Images + +1. Setup base image: + + ```bash + # Build the base Docker image for the project + make docker-build + ``` + +2. Setup integration test image: + + ```bash + # Build the base Docker image for integration test + make docker-integration + ``` + +#### Running Containerized Application + +For local smoke testing and runs: + +```bash +make docker-run-server +``` + +For integration, api or end to end testing: + +```bash +make docker-test-integration +``` + ## Reporting Bugs Or Issues 🐞 Before reporting a bug, please determine the type of issue you're encountering: @@ -180,7 +219,3 @@ We appreciate your contributions to Project-Healthcare! Your efforts help us mak Thank you! ❤️ ❤️ — The Project-Healthcare Team - - - - diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..b457aaf --- /dev/null +++ b/Dockerfile @@ -0,0 +1,11 @@ +# Use the base image for Python setup +# Reuse the stage from Dockerfile.build +FROM xcov19-setup AS run + +USER nonroot:nonroot + +# Set the start command +ARG START_CMD="make run" +ENV START_CMD=${START_CMD} +RUN if [ -z "${START_CMD}" ]; then echo "Unable to detect a container start command" && exit 1; fi +CMD ${START_CMD} \ No newline at end of file diff --git a/Dockerfile.build b/Dockerfile.build new file mode 100644 index 0000000..1927d99 --- /dev/null +++ b/Dockerfile.build @@ -0,0 +1,120 @@ +# Use the base image specified +ARG VERSION=3.12.6 +ARG BUILDER=docker.io/library/python +FROM ${BUILDER}:${VERSION}-slim AS python-base + +# Install build dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + ca-certificates \ + curl \ + git \ + libbz2-dev \ + libffi-dev \ + libgdal-dev \ + libgeos-dev \ + 
liblzma-dev \ + libncursesw5-dev \ + libproj-dev \ + libreadline-dev \ + libsqlite3-dev \ + libsqlite3-mod-spatialite \ + libssl-dev \ + libxml2-dev \ + libxmlsec1-dev \ + pkg-config \ + tk-dev \ + unzip \ + uuid-dev \ + wget \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +# Update CA certificates +RUN update-ca-certificates 2>/dev/null || true + +# Set environment variables +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +# Compile SQLite with loadable extension support +ENV SQLITE_VERSION=3460100 +RUN mkdir -p /build && cd /build && \ + wget --max-redirect=0 --secure-protocol=TLSv1_2 https://www.sqlite.org/2024/sqlite-amalgamation-${SQLITE_VERSION}.zip && \ + unzip sqlite-amalgamation-${SQLITE_VERSION}.zip && \ + rm sqlite-amalgamation-${SQLITE_VERSION}.zip && \ + cd sqlite-amalgamation-${SQLITE_VERSION} && \ + gcc -DSQLITE_THREADSAFE=0 -DSQLITE_ENABLE_FTS4 \ + -DSQLITE_ENABLE_FTS5 -DSQLITE_ENABLE_JSON1 \ + -DSQLITE_ENABLE_RTREE -DSQLITE_ENABLE_EXPLAIN_COMMENTS \ + -DHAVE_READLINE -DSQLITE_ENABLE_DBSTAT_VTAB \ + shell.c sqlite3.c -ldl -lm -lreadline -lncurses -o sqlite3 && \ + rm -rf /build + + +# Recompile Python to link against the custom SQLite +ENV PYTHON_VERSION=3.12.6 +RUN mkdir -p /build && cd /build && \ + wget --max-redirect=0 --secure-protocol=TLSv1_2 -q https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz && \ + tar xzf Python-${PYTHON_VERSION}.tgz && \ + cd Python-${PYTHON_VERSION} && \ + ./configure \ + --enable-optimizations \ + --with-ensurepip=install \ + --enable-loadable-sqlite-extensions \ + LDFLAGS="-L/usr/local/lib" \ + CPPFLAGS="-I/usr/local/include" \ + PKG_CONFIG_PATH="/usr/local/lib/pkgconfig" && \ + make -j"$(nproc)" && \ + make altinstall && \ + rm -rf /build + +# Update alternatives to point to the new Python +RUN ln -sf /usr/local/bin/python${PYTHON_VERSION%.*} /usr/local/bin/python3 +RUN ln -sf /usr/local/bin/pip${PYTHON_VERSION%.*} /usr/local/bin/pip3 + +# Use the base image for Python setup 
+# Reuse the stage from Dockerfile.build +FROM python-base AS xcov19-setup + +# Set the working directory +WORKDIR /app +# Create nonroot user and group +RUN addgroup --system nonroot && adduser --system --ingroup nonroot nonroot + +# Change ownership of /app and /var/cache +RUN chown -R nonroot:nonroot /app +RUN mkdir -p /var/cache +RUN chown -R nonroot:nonroot /var/cache + +# Copy the application code +COPY --chown=nonroot:nonroot --chmod=555 xcov19 xcov19/ +COPY --chown=nonroot:nonroot --chmod=555 Makefile . +COPY --chown=nonroot:nonroot --chmod=555 pyproject.toml . +COPY --chown=nonroot:nonroot --chmod=555 poetry.lock . +COPY --chown=nonroot:nonroot --chmod=555 *.sh . +COPY --chown=nonroot:nonroot --chmod=555 LICENSE . + +ENV POETRY_NO_INTERACTION=1 +ENV POETRY_VIRTUALENVS_CREATE=false +ENV POETRY_CACHE_DIR='/var/cache/pypoetry' +ENV POETRY_HOME='/usr/local' + +# Install Poetry using the recompiled Python +RUN curl --proto "=https" --tlsv1.2 -sSf -L https://install.python-poetry.org | python3 - + +# Change ownership of Poetry's cache and configuration directories +RUN mkdir -p /var/cache/pypoetry && chown -R nonroot:nonroot /var/cache/pypoetry +RUN chown -R nonroot:nonroot /usr/local/ && chmod -R 755 /usr/local/ + +# Switch to nonroot user +USER nonroot:nonroot + + +# Install project dependencies +ARG INSTALL_CMD="poetry install --only main --no-root --no-ansi" +RUN if [ -z "${INSTALL_CMD}" ]; then echo "Unable to start poetry install command" && exit 1; fi +RUN if [ -f "poetry.lock" ]; then \ + echo "poetry lock exists. 
updating" && \ + chmod 755 poetry.lock && poetry lock --no-update; fi; +RUN ${INSTALL_CMD} \ No newline at end of file diff --git a/Dockerfile.test-integration b/Dockerfile.test-integration new file mode 100644 index 0000000..126a978 --- /dev/null +++ b/Dockerfile.test-integration @@ -0,0 +1,16 @@ +# Use the base image for Python setup +# Reuse the stage from Dockerfile.build +FROM xcov19-setup AS test-integration + +# Switch to nonroot user +USER nonroot:nonroot + +ARG INSTALL_CMD="poetry install --no-root --no-ansi --extras=test" +RUN if [ -z "${INSTALL_CMD}" ]; then echo "Unable to start poetry install command" && exit 1; fi +RUN if [ -f "poetry.lock" ]; then echo "poetry lock exists. updating" && poetry lock --no-update; fi; +RUN ${INSTALL_CMD} + +ARG START_CMD="make test-integration" +ENV START_CMD=${START_CMD} +RUN if [ -z "${START_CMD}" ]; then echo "Unable to detect a container start command" && exit 1; fi +CMD ${START_CMD} \ No newline at end of file diff --git a/Makefile b/Makefile index 00ce2a1..3f9dc11 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,6 @@ +XCOV19_SETUP_IMAGE := xcov19-setup +XCOV19_TEST_INTEGRATION_SETUP_IMAGE := xcov19-integration-test + check: @bash check.sh @@ -17,4 +20,16 @@ test-integration: APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "integration" todos: - @grep -rn "TODO:" xcov19/ --exclude-dir=node_modules --include="*.py" \ No newline at end of file + @grep -rn "TODO:" xcov19/ --exclude-dir=node_modules --include="*.py" + +docker-build: + docker build --load -f Dockerfile.build -t $(XCOV19_SETUP_IMAGE) . + +docker-integration: + docker build --load -f Dockerfile.test-integration -t $(XCOV19_TEST_INTEGRATION_SETUP_IMAGE) . 
+ +docker-run-server: + docker compose -f docker-compose.yml up --build + +docker-test-integration: + make docker-integration && docker run -it -f Dockerfile.test-integration diff --git a/README.md b/README.md index 189de6c..2d827c7 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,7 @@ Released under [LGPL-2.1](/LICENSE) by [@Xcov19](https://github.com/Xcov19). # Project Healthcare (xcov19) Project Healthcare, from hereon called the brokering service, is a set of upstream OpenAPI specification to extend any patient facing user interface looking to integrate to location-aware consultation and diagnostics facilities near them by: + 1. Exposing a diagnosis API to capture patient symptoms. 2. Exposing geolocation API to caputure patient's location and offer nearby facilities to take action on based on their diagnosis and location. @@ -42,6 +43,7 @@ web framework to start Web APIs, the project structure adheres blacksheep's doma domain driven design philosophy using ports and adapters pattern so expect slight shift towards domain models and services structure. The specification follows a sandwich service model i.e. it requires one or more upstream producer services and one downstream consumer service as follows: + 1. The patient facing application, known from hereon as the downstream consumer service, calls the diagnosis and geolocation API. 2. The brokering service stores transient diagnosis request and enqueues them to upstream provider service that should return records of facilities and their specialties based on the diagnosis. 3. The brokering service returns the records of matching facilities to the downstream consumer service. 
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..807e410 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,17 @@ +version: "3" + +x-shared-config: &shared-config + ports: + - "${PORT:-44777}:44777" + restart: always + logging: + options: + max-size: 0.25g + max-file: 2 + +services: + xcov19-app: + <<: *shared-config + build: + context: . + dockerfile: Dockerfile \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 4e4d5fc..5d379f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,11 +19,11 @@ sqlalchemy = {version="^2.0.34", markers = "sys_platform != 'win32'", extras = [ alembic = "^1.13.2" aiosqlite = "^0.20.0" sqlmodel = {version="^0.0.22"} +rich = {version = "^13.8.0"} ruff = { version = "^0.6.3", optional = true } mypy = { version = "^1.11.2", optional = true } blacksheep-cli = { version = "^0.0.4", optional = true } -rich = { version = "^13.8.0", optional = true } pyright = { version = "^1.1.379", optional = true } pre-commit = { version="^3.7.1", optional = true } pytest-asyncio = { version = "^0.24.0", optional = true } diff --git a/run.sh b/run.sh index e7ca90c..912f7ad 100755 --- a/run.sh +++ b/run.sh @@ -1,3 +1,3 @@ #!/bin/bash -APP_ENV=dev APP_DB_ENGINE_URL="sqlite+aiosqlite:///xcov19.db" poetry run python3 -m xcov19.dev \ No newline at end of file +if [ -f "xcov19.db" ]; then rm xcov19.db; fi; APP_ENV=dev APP_DB_ENGINE_URL="sqlite+aiosqlite:///xcov19.db" poetry run python3 -m xcov19.dev \ No newline at end of file From 71f5c43f03e53822af37701d004ed461c8020122 Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 23 Sep 2024 00:05:56 +0530 Subject: [PATCH 26/27] these changes reflect a working spatialite extension to sqlite. 
the tables are created in the container and spatialite is loaded successfully
- ENV POETRY_NO_INTERACTION=1 ENV POETRY_VIRTUALENVS_CREATE=false ENV POETRY_CACHE_DIR='/var/cache/pypoetry' @@ -107,6 +99,11 @@ RUN curl --proto "=https" --tlsv1.2 -sSf -L https://install.python-poetry.org | RUN mkdir -p /var/cache/pypoetry && chown -R nonroot:nonroot /var/cache/pypoetry RUN chown -R nonroot:nonroot /usr/local/ && chmod -R 755 /usr/local/ +# Copy the application code +COPY --chown=nonroot:nonroot --chmod=555 pyproject.toml . +COPY --chown=nonroot:nonroot --chmod=555 poetry.lock . +COPY --chown=nonroot:nonroot --chmod=555 LICENSE . + # Switch to nonroot user USER nonroot:nonroot diff --git a/Dockerfile.test-integration b/Dockerfile.test-integration index 126a978..14ae986 100644 --- a/Dockerfile.test-integration +++ b/Dockerfile.test-integration @@ -2,6 +2,16 @@ # Reuse the stage from Dockerfile.build FROM xcov19-setup AS test-integration +# Set the working directory +WORKDIR /app + +# Bust cached build if --build CACHEBUST= is passed +# to ensure updated source code is built +ARG CACHEBUST=1 +COPY --chown=nonroot:nonroot --chmod=555 xcov19 xcov19/ +COPY --chown=nonroot:nonroot --chmod=555 Makefile . +COPY --chown=nonroot:nonroot --chmod=555 *.sh . + # Switch to nonroot user USER nonroot:nonroot diff --git a/Makefile b/Makefile index 3f9dc11..59678a5 100644 --- a/Makefile +++ b/Makefile @@ -17,11 +17,14 @@ test: APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "not slow and not integration and not api" test-integration: - APP_ENV=test APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "integration" + APP_ENV=test PYTHON_CONFIGURE_OPTS="--enable-loadable-sqlite-extensions" APP_DB_ENGINE_URL="sqlite+aiosqlite://" pytest -s xcov19/tests/ -m "integration" todos: @grep -rn "TODO:" xcov19/ --exclude-dir=node_modules --include="*.py" +set-docker: + @bash set_docker.sh + docker-build: docker build --load -f Dockerfile.build -t $(XCOV19_SETUP_IMAGE) . 
@@ -29,7 +32,7 @@ docker-integration: docker build --load -f Dockerfile.test-integration -t $(XCOV19_TEST_INTEGRATION_SETUP_IMAGE) . docker-run-server: - docker compose -f docker-compose.yml up --build + docker compose -f docker-compose.yml up --build --remove-orphans docker-test-integration: make docker-integration && docker run -it -f Dockerfile.test-integration diff --git a/docker-compose.yml b/docker-compose.yml index 807e410..9b069b2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,8 +10,10 @@ x-shared-config: &shared-config max-file: 2 services: - xcov19-app: + app: <<: *shared-config build: context: . - dockerfile: Dockerfile \ No newline at end of file + dockerfile: Dockerfile + args: + CACHEBUST: ${CACHEBUST:-$(date +%s)} \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index d4d3e18..886b47d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "aiosqlite" @@ -50,13 +50,13 @@ files = [ [[package]] name = "anyio" -version = "4.4.0" +version = "4.5.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = true python-versions = ">=3.8" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, + {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, ] [package.dependencies] @@ -64,9 +64,9 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "arrow" @@ -518,85 +518,100 @@ full = ["click (>=8.1.3,<8.2.0)", "httpx (<1)", "jinja2 (>=3.1.2,<3.2.0)", "rich [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = true python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = 
"greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = 
"sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = 
"greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -741,13 +756,13 @@ files = [ [[package]] name = "identify" -version = "2.6.0" +version = "2.6.1" description = "File identification library for Python" optional = true python-versions = ">=3.8" files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -755,15 +770,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = true python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 
(>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -826,7 +844,7 @@ testing = ["pytest"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, @@ -919,7 +937,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, @@ -1032,13 +1050,13 @@ test = ["Faker (>=1.0.8)", "allpairspy (>=2)", "click (>=6.2)", "pytest (>=6.0.1 [[package]] name = "platformdirs" -version = "4.3.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = true python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, - {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] @@ -1106,18 +1124,18 @@ wcwidth = "*" [[package]] name = "pydantic" -version = "2.9.1" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, - {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.3" +pydantic-core = "2.23.4" typing-extensions = [ {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, @@ -1129,100 +1147,100 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.3" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, - {file = 
"pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, - {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, - {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, - {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, - {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, - {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, - {file = 
"pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, - {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, - {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, - {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, - {file = 
"pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, - {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, - {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, - {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, - {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, - {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, - {file = 
"pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, - {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, - {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, - {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, - {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, - {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, - {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, - {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, - {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, - {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, - {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, - {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, - {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, - {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, - {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, - {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, - {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, - {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, - {file = 
"pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, - {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, - {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1230,13 +1248,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.4.0" +version = "2.5.2" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, + {file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = 
"sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, ] [package.dependencies] @@ -1252,7 +1270,7 @@ yaml = ["pyyaml (>=6.0.1)"] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, @@ -1264,13 +1282,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyright" -version = "1.1.379" +version = "1.1.381" description = "Command line wrapper for pyright" optional = true python-versions = ">=3.7" files = [ - {file = "pyright-1.1.379-py3-none-any.whl", hash = "sha256:01954811ac71db8646f50de1577576dc275ffb891a9e7324350e676cf6df323f"}, - {file = "pyright-1.1.379.tar.gz", hash = "sha256:6f426cb6443786fa966b930c23ad1941c8cb9fe672e4589daea8d80bb34193ea"}, + {file = "pyright-1.1.381-py3-none-any.whl", hash = "sha256:5dc0aa80a265675d36abab59c674ae01dbe476714f91845b61b841d34aa99081"}, + {file = "pyright-1.1.381.tar.gz", hash = "sha256:314cf0c1351c189524fb10c7ac20688ecd470e8cc505c394d642c9c80bf7c3a5"}, ] [package.dependencies] @@ -1282,13 +1300,13 @@ dev = ["twine (>=3.4.1)"] [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = true python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1462,13 +1480,13 @@ 
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.8.0" +version = "13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = true +optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, - {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -1511,29 +1529,29 @@ files = [ [[package]] name = "ruff" -version = "0.6.3" +version = "0.6.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = true python-versions = ">=3.7" files = [ - {file = "ruff-0.6.3-py3-none-linux_armv6l.whl", hash = "sha256:97f58fda4e309382ad30ede7f30e2791d70dd29ea17f41970119f55bdb7a45c3"}, - {file = "ruff-0.6.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b061e49b5cf3a297b4d1c27ac5587954ccb4ff601160d3d6b2f70b1622194dc"}, - {file = "ruff-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:34e2824a13bb8c668c71c1760a6ac7d795ccbd8d38ff4a0d8471fdb15de910b1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bddfbb8d63c460f4b4128b6a506e7052bad4d6f3ff607ebbb41b0aa19c2770d1"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ced3eeb44df75353e08ab3b6a9e113b5f3f996bea48d4f7c027bc528ba87b672"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47021dff5445d549be954eb275156dfd7c37222acc1e8014311badcb9b4ec8c1"}, - {file = 
"ruff-0.6.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d7bd20dc07cebd68cc8bc7b3f5ada6d637f42d947c85264f94b0d1cd9d87384"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:500f166d03fc6d0e61c8e40a3ff853fa8a43d938f5d14c183c612df1b0d6c58a"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42844ff678f9b976366b262fa2d1d1a3fe76f6e145bd92c84e27d172e3c34500"}, - {file = "ruff-0.6.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70452a10eb2d66549de8e75f89ae82462159855e983ddff91bc0bce6511d0470"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65a533235ed55f767d1fc62193a21cbf9e3329cf26d427b800fdeacfb77d296f"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2e2c23cef30dc3cbe9cc5d04f2899e7f5e478c40d2e0a633513ad081f7361b5"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8a136aa7d228975a6aee3dd8bea9b28e2b43e9444aa678fb62aeb1956ff2351"}, - {file = "ruff-0.6.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f92fe93bc72e262b7b3f2bba9879897e2d58a989b4714ba6a5a7273e842ad2f8"}, - {file = "ruff-0.6.3-py3-none-win32.whl", hash = "sha256:7a62d3b5b0d7f9143d94893f8ba43aa5a5c51a0ffc4a401aa97a81ed76930521"}, - {file = "ruff-0.6.3-py3-none-win_amd64.whl", hash = "sha256:746af39356fee2b89aada06c7376e1aa274a23493d7016059c3a72e3b296befb"}, - {file = "ruff-0.6.3-py3-none-win_arm64.whl", hash = "sha256:14a9528a8b70ccc7a847637c29e56fd1f9183a9db743bbc5b8e0c4ad60592a82"}, - {file = "ruff-0.6.3.tar.gz", hash = "sha256:183b99e9edd1ef63be34a3b51fee0a9f4ab95add123dbf89a71f7b1f0c991983"}, + {file = "ruff-0.6.6-py3-none-linux_armv6l.whl", hash = "sha256:f5bc5398457484fc0374425b43b030e4668ed4d2da8ee7fdda0e926c9f11ccfb"}, + {file = "ruff-0.6.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:515a698254c9c47bb84335281a170213b3ee5eb47feebe903e1be10087a167ce"}, + {file = 
"ruff-0.6.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6bb1b4995775f1837ab70f26698dd73852bbb82e8f70b175d2713c0354fe9182"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c546f412dfae8bb9cc4f27f0e45cdd554e42fecbb34f03312b93368e1cd0a6"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59627e97364329e4eae7d86fa7980c10e2b129e2293d25c478ebcb861b3e3fd6"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94c3f78c3d32190aafbb6bc5410c96cfed0a88aadb49c3f852bbc2aa9783a7d8"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:704da526c1e137f38c8a067a4a975fe6834b9f8ba7dbc5fd7503d58148851b8f"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efeede5815a24104579a0f6320660536c5ffc1c91ae94f8c65659af915fb9de9"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e368aef0cc02ca3593eae2fb8186b81c9c2b3f39acaaa1108eb6b4d04617e61f"}, + {file = "ruff-0.6.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2653fc3b2a9315bd809725c88dd2446550099728d077a04191febb5ea79a4f79"}, + {file = "ruff-0.6.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:bb858cd9ce2d062503337c5b9784d7b583bcf9d1a43c4df6ccb5eab774fbafcb"}, + {file = "ruff-0.6.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:488f8e15c01ea9afb8c0ba35d55bd951f484d0c1b7c5fd746ce3c47ccdedce68"}, + {file = "ruff-0.6.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:aefb0bd15f1cfa4c9c227b6120573bb3d6c4ee3b29fb54a5ad58f03859bc43c6"}, + {file = "ruff-0.6.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a4c0698cc780bcb2c61496cbd56b6a3ac0ad858c966652f7dbf4ceb029252fbe"}, + {file = "ruff-0.6.6-py3-none-win32.whl", hash = "sha256:aadf81ddc8ab5b62da7aae78a91ec933cbae9f8f1663ec0325dae2c364e4ad84"}, + {file = 
"ruff-0.6.6-py3-none-win_amd64.whl", hash = "sha256:0adb801771bc1f1b8cf4e0a6fdc30776e7c1894810ff3b344e50da82ef50eeb1"}, + {file = "ruff-0.6.6-py3-none-win_arm64.whl", hash = "sha256:4b4d32c137bc781c298964dd4e52f07d6f7d57c03eae97a72d97856844aa510a"}, + {file = "ruff-0.6.6.tar.gz", hash = "sha256:0fc030b6fd14814d69ac0196396f6761921bd20831725c7361e1b8100b818034"}, ] [[package]] @@ -1558,62 +1576,73 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "spatialite" +version = "0.0.3" +description = "Wrapper of sqlite3 module which adds SpatiaLite support." +optional = false +python-versions = "*" +files = [ + {file = "spatialite-0.0.3-py3-none-any.whl", hash = "sha256:3124f643688c8ba4e2ff200ef401cf7b57caa44db666cb78519784f98f662982"}, + {file = "spatialite-0.0.3.tar.gz", hash = "sha256:a0761f239a52f326b14ce41ba61b6614dfcc808b978a0bec4a37c1de9ad9071e"}, +] + [[package]] name = "sqlalchemy" -version = "2.0.34" +version = "2.0.35" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", 
hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, - {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, - {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = 
"SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = 
"sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, ] [package.dependencies] @@ -1673,13 +1702,13 @@ files = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20240821" +version = "2.9.0.20240906" description = "Typing stubs for python-dateutil" optional = true python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, - {file = "types_python_dateutil-2.9.0.20240821-py3-none-any.whl", hash = "sha256:f5889fcb4e63ed4aaa379b44f93c32593d50b9a94c9a60a0c854d8cc3511cd57"}, + {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, + {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, ] [[package]] @@ -1695,13 +1724,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = true python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1756,13 +1785,13 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.5" description = "Virtual Python Environment builder" optional = true python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] [package.dependencies] @@ -1807,4 +1836,4 @@ test = ["anyio", "pytest", "pytest-asyncio"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "a217ad1a23f62af30cd5370f64da6495922435b41d8302e564e1adf07cfffd01" +content-hash = "bb27d3f3dc0498bac539a02e31df386ff54c86ab0ed109fbdccd19aada316c92" diff --git a/pyproject.toml b/pyproject.toml index 5d379f5..fc18c92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,7 @@ authors = ["codecakes "] readme = "README.md" package-mode = true + [tool.poetry.dependencies] python = "^3.12" pydantic = "^2.9.1" @@ -20,6 +21,7 @@ alembic = "^1.13.2" aiosqlite = "^0.20.0" sqlmodel 
= {version="^0.0.22"} rich = {version = "^13.8.0"} +spatialite = "^0.0.3" ruff = { version = "^0.6.3", optional = true } mypy = { version = "^1.11.2", optional = true } @@ -66,8 +68,9 @@ include = [ ] exclude = [ "**/node_modules", - "**/__pycache__"] -venv = "xcov19-7M_0Y8Vx-py3.12" + "**/__pycache__" + ] +# venv = "xcov19-7M_0Y8Vx-py3.12" reportMissingImports = true [[tool.pyright.executionEnvironments]] diff --git a/run.sh b/run.sh index 912f7ad..52fd0c7 100755 --- a/run.sh +++ b/run.sh @@ -1,3 +1,10 @@ #!/bin/bash -if [ -f "xcov19.db" ]; then rm xcov19.db; fi; APP_ENV=dev APP_DB_ENGINE_URL="sqlite+aiosqlite:///xcov19.db" poetry run python3 -m xcov19.dev \ No newline at end of file +echo "listing all files"; +ls; +if [ -f "xcov19.db" ]; then + echo "removing database"; + rm xcov19.db; +fi; + +APP_ENV=dev APP_DB_ENGINE_URL="sqlite+aiosqlite:///xcov19.db" poetry run python3 -m xcov19.dev \ No newline at end of file diff --git a/xcov19/app/database.py b/xcov19/app/database.py index 08879a9..ba3f7de 100644 --- a/xcov19/app/database.py +++ b/xcov19/app/database.py @@ -1,6 +1,8 @@ +import asyncio from collections.abc import AsyncGenerator from contextlib import asynccontextmanager import sys +import aiosqlite from rodi import Container from xcov19.infra.models import SQLModel from sqlmodel import text @@ -11,9 +13,11 @@ AsyncEngine, async_sessionmaker, ) +from sqlalchemy.dialects.sqlite.aiosqlite import AsyncAdapt_aiosqlite_connection import logging from sqlalchemy.pool import AsyncAdaptedQueuePool +from sqlalchemy import event db_logger = logging.getLogger(__name__) db_fmt = logging.Formatter( @@ -47,9 +51,43 @@ def __call__(self) -> async_sessionmaker[AsyncSessionWrapper]: async def setup_database(engine: AsyncEngine) -> None: """Sets up tables for database.""" + + @event.listens_for(engine.sync_engine, "connect") + def load_spatialite( + dbapi_conn: AsyncAdapt_aiosqlite_connection, _connection_record + ): + loop = asyncio.get_running_loop() + + async def 
load_async_extension(): + conn: aiosqlite.Connection = dbapi_conn.driver_connection + await conn.enable_load_extension(True) + await conn.load_extension("mod_spatialite") + db_logger.info("======= PRAGMA load_extension successful =======") + try: + async with conn.execute( + "SELECT spatialite_version() as version" + ) as cursor: + result = await cursor.fetchone() + db_logger.info(f"==== Spatialite Version: {result} ====") + db_logger.info("===== mod_spatialite loaded =====") + except (AttributeError, aiosqlite.OperationalError) as e: + db_logger.error(e) + raise (e) + + # Schedule the coroutine in the existing event loop + loop.create_task(load_async_extension()) + async with engine.begin() as conn: + # Enable extension loading + await conn.execute(text("PRAGMA load_extension = 1")) + # db_logger.info("SQLAlchemy setup to load the SpatiaLite extension.") + # await conn.execute(text("SELECT load_extension('/opt/homebrew/Cellar/libspatialite/5.1.0_1/lib/mod_spatialite.dylib')")) + # await conn.execute(text("SELECT load_extension('mod_spatialite')")) # see: https://sqlmodel.tiangolo.com/tutorial/relationship-attributes/cascade-delete-relationships/#enable-foreign-key-support-in-sqlite await conn.execute(text("PRAGMA foreign_keys=ON")) + # test_result = await conn.execute(text("SELECT spatialite_version() as version;")) + # print(f"==== Spatialite Version: {test_result.fetchone()} ====") + await conn.run_sync(SQLModel.metadata.create_all) await conn.commit() db_logger.info("===== Database tables setup. 
=====") diff --git a/xcov19/domain/models/provider.py b/xcov19/domain/models/provider.py index 73f14dc..35001ef 100644 --- a/xcov19/domain/models/provider.py +++ b/xcov19/domain/models/provider.py @@ -28,6 +28,7 @@ class FacilityOwnership(enum.StrEnum): type Specialties = List[str] type Qualification = List[str] type PracticeExpYears = int | float +type MoneyType = int | float @dataclass @@ -73,6 +74,7 @@ class Doctor: specialties: Specialties degree: Qualification experience: PracticeExpYears + fee: MoneyType @dataclass @@ -84,5 +86,6 @@ class Provider: facility_type: FacilityType ownership: FacilityOwnerType specialties: Specialties + available_doctors: List[Doctor] stars: Annotated[int, Stars(min_rating=1, max_rating=5)] reviews: Annotated[int, Reviews(value=0)] diff --git a/xcov19/infra/models.py b/xcov19/infra/models.py index 2e813a0..6d901db 100644 --- a/xcov19/infra/models.py +++ b/xcov19/infra/models.py @@ -34,14 +34,63 @@ from __future__ import annotations -from typing import List +import json +from typing import Annotated, Dict, List, Tuple, Any +from pydantic import GetCoreSchemaHandler, TypeAdapter +from pydantic_core import CoreSchema, core_schema +from sqlalchemy.sql.elements import ColumnElement +from sqlalchemy.sql.type_api import _BindProcessorType from sqlmodel import SQLModel, Field, Relationship -from sqlalchemy import Column, Text, Float, Index +from sqlalchemy import BindParameter, Column, Dialect, Text, Float, Index, func from sqlalchemy.orm import relationship, Mapped import uuid -from sqlalchemy.dialects.sqlite import TEXT +from sqlalchemy.dialects.sqlite import TEXT, NUMERIC, JSON, INTEGER +from sqlalchemy.types import UserDefinedType +class PointType(UserDefinedType): + """Defines a geopoint type. + + It also sets the type as a pydantic type when plugged into TypeAdapter. 
+ """ + + def get_col_spec(self): + return "POINT" + + def result_processor(self, dialect: Dialect, coltype: Any) -> Any | None: + def process(value): + if not value: + return None + parsed_value = value[6:-1].split() + return tuple(map(float, parsed_value)) + + return process + + def bind_processor(self, dialect: Dialect) -> _BindProcessorType | None: + def process(value): + if not value: + return None + lat, lng = value + return f"POINT({lat} {lng})" + + return process + + def bind_expression(self, bindvalue: BindParameter) -> ColumnElement | None: + return func.GeomFromText(bindvalue, type_=self) + + @classmethod + def __get_pydantic_core_schema__( + cls, _source_type: Tuple, handler: GetCoreSchemaHandler + ) -> CoreSchema: + """Pydantic validates the data as a tuple.""" + return core_schema.no_info_after_validator_function(cls, handler(tuple)) + + @classmethod + def pydantic_adapter(cls) -> TypeAdapter: + return TypeAdapter(cls) + + +### These tables map to the domain models for Patient class Patient(SQLModel, table=True): patient_id: str = Field( sa_column=Column( @@ -50,7 +99,6 @@ class Patient(SQLModel, table=True): allow_mutation=False, ) queries: Mapped[List["Query"]] = Relationship( - # back_populates="patient", passive_deletes="all", cascade_delete=True, sa_relationship=relationship(back_populates="patient"), @@ -88,18 +136,42 @@ class Location(SQLModel, table=True): latitude: float = Field(sa_column=Column(Float)) longitude: float = Field(sa_column=Column(Float)) queries: Mapped[List["Query"]] = Relationship( - # back_populates="location", cascade_delete=True, passive_deletes=True, sa_relationship=relationship(back_populates="location"), ) -# TODO: Define Provider SQL model fields -# class Provider(SQLModel, table=True): -# # TODO: Compare with Github issue, domain model and noccodb -# ... 
+### + + +### These tables map to the domain models for Provider +class Provider(SQLModel, table=True): + provider_id: str = Field( + sa_column=Column( + TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) + ), + allow_mutation=False, + ) + name: str = Field( + sa_column=Column(TEXT, nullable=False), + ) + address: str = Field(sa_column=Column(TEXT, nullable=False), allow_mutation=False) + geopoint: Annotated[ + tuple, lambda geom: PointType.pydantic_adapter().validate_python(geom) + ] = Field(sa_column=Column(PointType, nullable=False), allow_mutation=False) + contact: str = Field(sa_column=Column(NUMERIC, nullable=False)) + facility_type: str = Field(sa_column=Column(TEXT, nullable=False)) + ownership_type: str = Field(sa_column=Column(TEXT, nullable=False)) + specialties: List[str] = Field(sa_column=Column(JSON, nullable=False)) + stars: int = Field(sa_column=Column(INTEGER, nullable=False, default=0)) + reviews: int = Field(sa_column=Column(INTEGER, nullable=False, default=0)) + available_doctors: List[Dict[str, str | int | float | list]] = Field( + sa_column=Column(JSON, nullable=False, default=json.dumps([])) + ) + +### # TODO: Add Model events for database ops during testing # @event.listens_for(Query, "after_delete") diff --git a/xcov19/tests/data/seed_db.py b/xcov19/tests/data/seed_db.py index 0c451c2..fb38c7b 100644 --- a/xcov19/tests/data/seed_db.py +++ b/xcov19/tests/data/seed_db.py @@ -1,12 +1,5 @@ """Dummy data to seed to database models. Mapped to SQLModel. 
- -dummy GeoLocation: -lat=0 -lng=0 - -cust_id=test_cust_id -query_id=test_query_id """ from sqlalchemy import ScalarResult @@ -17,6 +10,13 @@ async def seed_data(session: AsyncSessionWrapper): """ + dummy GeoLocation: + lat=0 + lng=0 + + cust_id=test_cust_id + query_id=test_query_id + Now you can do: res = await self._session.exec(select(Query)) query = res.first() diff --git a/xcov19/tests/start_server.py b/xcov19/tests/start_server.py index 4ee8768..9d01e16 100644 --- a/xcov19/tests/start_server.py +++ b/xcov19/tests/start_server.py @@ -35,7 +35,7 @@ async def start_server(app: Application) -> AsyncGenerator[Application, None]: await app.stop() -class setUpTestDatabase: +class SetUpTestDatabase: """Manages the lifecycle of the test database.""" def __init__(self) -> None: @@ -52,6 +52,7 @@ async def setup_test_database(self) -> None: await setup_database(engine) async def start_async_session(self) -> AsyncSession | AsyncSessionWrapper: + """Returns an asynchronous session.""" if not isinstance(self._container, Container): raise InvalidDIContainerTypeError("Container not of valid type.") self._session = await self._stack.enter_async_context( @@ -64,6 +65,7 @@ async def start_async_session(self) -> AsyncSession | AsyncSessionWrapper: return self._session async def aclose(self) -> None: + # TODO: replace print with logger print("async closing test server db session closing.") if not isinstance(self._session, AsyncSessionWrapper): raise InvalidSessionTypeError( diff --git a/xcov19/tests/test_services.py b/xcov19/tests/test_services.py index f3de758..0a36a51 100644 --- a/xcov19/tests/test_services.py +++ b/xcov19/tests/test_services.py @@ -5,10 +5,9 @@ import random -from xcov19.tests.data.seed_db import seed_data -from xcov19.tests.start_server import setUpTestDatabase from xcov19.domain.models.provider import ( Contact, + Doctor, FacilityEstablishment, FacilityOwnership, Provider, @@ -23,8 +22,6 @@ from xcov19.utils.mixins import InterfaceProtocolCheckMixin -from 
sqlmodel.ext.asyncio.session import AsyncSession as AsyncSessionWrapper - RANDOM_SEED = random.seed(1) @@ -39,6 +36,15 @@ def fetch_by_providers(self, **address: dict[str, str]) -> List[Provider]: contact=Contact("+1234567890"), facility_type=FacilityEstablishment.HOSPITAL, ownership=FacilityOwnership.PRIVATE, + available_doctors=[ + Doctor( + name="Dr. Smith", + specialties=["General"], + degree=["MBBS"], + experience=5, + fee=250.00, + ) + ], specialties=["General", "Surgery"], stars=4, reviews=100, @@ -56,6 +62,15 @@ def fetch_by_query( contact=Contact("+1234567890"), facility_type=FacilityEstablishment.HOSPITAL, ownership=FacilityOwnership.PRIVATE, + available_doctors=[ + Doctor( + name="Dr. Smith", + specialties=["General"], + degree=["MBBS"], + experience=5, + fee=250.00, + ) + ], specialties=["General", "Surgery"], stars=4, reviews=100, @@ -186,55 +201,60 @@ async def test_fetch_facilities_no_results(self): # @pytest.mark.skip(reason="WIP") -@pytest.mark.integration -@pytest.mark.usefixtures("dummy_reverse_geo_lookup_svc", "dummy_geolocation_query_json") -class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): - """Test case for Sqlite Repository to test Geolocation Service. - - Before testing, ensure to: - 1. Setup Database - 2. For fetch_facilities, relevant services are configured. - 3. patient_query_lookup_svc is configured to call sqlite repository. 
- """ - - @pytest.fixture(autouse=True) - def autouse( - self, - dummy_geolocation_query_json: LocationQueryJSON, - dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], - ): - self.dummy_geolocation_query_json = dummy_geolocation_query_json - self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc - - async def asyncSetUp(self) -> None: - self._test_db = setUpTestDatabase() - await self._test_db.setup_test_database() - self._session = await self._test_db.start_async_session() - assert isinstance(self._session, AsyncSessionWrapper) - await seed_data(self._session) - await super().asyncSetUp() - - async def asyncTearDown(self) -> None: - await self._test_db.aclose() - await super().asyncTearDown() - - def _patient_query_lookup_svc_using_repo( - self, address: Address, query: LocationQueryJSON - ) -> List[FacilitiesResult]: - # TODO: Implement a patient query lookup service - # that returns type List[FacilitiesResult] - ... - - async def test_fetch_facilities(self): - # TODO Implement test_fetch_facilities like this: - providers = await GeolocationQueryService.fetch_facilities( - self.dummy_reverse_geo_lookup_svc, - self.dummy_geolocation_query_json, - self._patient_query_lookup_svc_using_repo, - ) - assert providers - self.assertIsInstance(providers, list) - self.assertIs(len(providers), 1) +# @pytest.mark.integration +# @pytest.mark.usefixtures("dummy_reverse_geo_lookup_svc", "dummy_geolocation_query_json") +# class GeoLocationServiceSqlRepoDBTest(unittest.IsolatedAsyncioTestCase): +# """Test case for Sqlite Repository to test Geolocation Service. + +# Before testing, ensure to: +# 1. Setup Database +# 2. For fetch_facilities, relevant services are configured. +# 3. patient_query_lookup_svc is configured to call sqlite repository. 
+# """ + +# @pytest.fixture(autouse=True) +# def autouse( +# self, +# dummy_geolocation_query_json: LocationQueryJSON, +# dummy_reverse_geo_lookup_svc: Callable[[LocationQueryJSON], dict], +# ): +# self.dummy_geolocation_query_json = dummy_geolocation_query_json +# self.dummy_reverse_geo_lookup_svc = dummy_reverse_geo_lookup_svc + +# async def asyncSetUp(self) -> None: +# self._test_db = SetUpTestDatabase() +# await self._test_db.setup_test_database() +# self._session = await self._test_db.start_async_session() +# assert isinstance(self._session, AsyncSessionWrapper) +# await seed_data(self._session) +# await super().asyncSetUp() + +# async def asyncTearDown(self) -> None: +# await self._test_db.aclose() +# await super().asyncTearDown() + +# def _patient_query_lookup_svc_using_repo( +# self, address: Address, query: LocationQueryJSON +# ) -> List[FacilitiesResult]: +# # TODO: Implement a patient query lookup service +# # that returns type List[FacilitiesResult] +# repo: IProviderRepository = SqliteProviderRepo(self._session) +# # TODO: change repo fetch_by_providers to List facilties, not providers. 
+# providers: List[Provider] = repo.fetch_by_providers( +# **address.model_dump(round_trip=True) +# ) +# return repo.fetch_by_query(query.query_id.query_id, providers) + +# async def test_fetch_facilities(self): +# # TODO Implement test_fetch_facilities like this: +# facilities = await GeolocationQueryService.fetch_facilities( +# self.dummy_reverse_geo_lookup_svc, +# self.dummy_geolocation_query_json, +# self._patient_query_lookup_svc_using_repo, +# ) +# assert facilities +# self.assertIsInstance(facilities, list) +# self.assertIs(len(facilities), 1) @pytest.mark.usefixtures("dummy_geolocation_query_json", "dummy_reverse_geo_lookup_svc") From b98428ae9e6cf6c506e3b06628157c8d46f3e09c Mon Sep 17 00:00:00 2001 From: codecakes Date: Mon, 23 Sep 2024 00:48:28 +0530 Subject: [PATCH 27/27] WIP: Feature/fix infra spatialite (#79) Introduces changes to fix spatialite extension loading issue to support data fields in sqlachemy model type `POINT`. This change directly supports the work in #26 - Fixes spatialite extension as loadable with the updated dockerfile setup and correct implementation using aiosqlite compatibility - Temporary disables integration test. The repository return type needs to be refactored and return type changed to list dto facilities results Fix SpatiaLite extension loading issue to support spatial data fields in SQLAlchemy models. Introduce a new `PointType` for handling geopoint data. Refactor test database setup and temporarily disable integration tests pending refactoring. Update build scripts and Makefile for improved Docker handling. New Features: - Introduce a new `PointType` class to handle geopoint data types in SQLAlchemy models, enabling support for spatial data fields like `POINT`. Bug Fixes: - Fix the loading of the SpatiaLite extension in the database setup to support spatial data operations. 
Enhancements: - Refactor the test database setup to use a new `SetUpTestDatabase` class, improving the management of test database lifecycle and session handling. Build: - Update the Makefile to include a new `set-docker` target and modify the `docker-run-server` target to remove orphan containers. Tests: - Temporarily disable integration tests due to the need for refactoring repository return types and changing return types to list DTO facilities results. Chores: - Update the `run.sh` script to include additional logging for file listing and database removal. --------- Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> --- xcov19/app/database.py | 48 +++++++++++++++++------------------- xcov19/infra/models.py | 22 +++++++---------- xcov19/tests/data/seed_db.py | 3 ++- 3 files changed, 34 insertions(+), 39 deletions(-) diff --git a/xcov19/app/database.py b/xcov19/app/database.py index ba3f7de..3cf2099 100644 --- a/xcov19/app/database.py +++ b/xcov19/app/database.py @@ -49,45 +49,43 @@ def __call__(self) -> async_sessionmaker[AsyncSessionWrapper]: ) -async def setup_database(engine: AsyncEngine) -> None: - """Sets up tables for database.""" +async def _load_spatialite(dbapi_conn: AsyncAdapt_aiosqlite_connection) -> None: + """Loads spatialite sqlite extension.""" + conn: aiosqlite.Connection = dbapi_conn.driver_connection + await conn.enable_load_extension(True) + await conn.load_extension("mod_spatialite") + db_logger.info("======= PRAGMA load_extension successful =======") + try: + async with conn.execute("SELECT spatialite_version() as version") as cursor: + result = await cursor.fetchone() + db_logger.info(f"==== Spatialite Version: {result} ====") + db_logger.info("===== mod_spatialite loaded =====") + except (AttributeError, aiosqlite.OperationalError) as e: + db_logger.error(e) + raise (e) + + +def setup_spatialite(engine: AsyncEngine) -> None: + """An event listener hook to setup spatialite using aiosqlite.""" 
@event.listens_for(engine.sync_engine, "connect") def load_spatialite( dbapi_conn: AsyncAdapt_aiosqlite_connection, _connection_record ): loop = asyncio.get_running_loop() + # Schedule the coroutine in the existing event loop + loop.create_task(_load_spatialite(dbapi_conn)) - async def load_async_extension(): - conn: aiosqlite.Connection = dbapi_conn.driver_connection - await conn.enable_load_extension(True) - await conn.load_extension("mod_spatialite") - db_logger.info("======= PRAGMA load_extension successful =======") - try: - async with conn.execute( - "SELECT spatialite_version() as version" - ) as cursor: - result = await cursor.fetchone() - db_logger.info(f"==== Spatialite Version: {result} ====") - db_logger.info("===== mod_spatialite loaded =====") - except (AttributeError, aiosqlite.OperationalError) as e: - db_logger.error(e) - raise (e) - # Schedule the coroutine in the existing event loop - loop.create_task(load_async_extension()) +async def setup_database(engine: AsyncEngine) -> None: + """Sets up tables for database.""" + setup_spatialite(engine) async with engine.begin() as conn: # Enable extension loading await conn.execute(text("PRAGMA load_extension = 1")) - # db_logger.info("SQLAlchemy setup to load the SpatiaLite extension.") - # await conn.execute(text("SELECT load_extension('/opt/homebrew/Cellar/libspatialite/5.1.0_1/lib/mod_spatialite.dylib')")) - # await conn.execute(text("SELECT load_extension('mod_spatialite')")) # see: https://sqlmodel.tiangolo.com/tutorial/relationship-attributes/cascade-delete-relationships/#enable-foreign-key-support-in-sqlite await conn.execute(text("PRAGMA foreign_keys=ON")) - # test_result = await conn.execute(text("SELECT spatialite_version() as version;")) - # print(f"==== Spatialite Version: {test_result.fetchone()} ====") - await conn.run_sync(SQLModel.metadata.create_all) await conn.commit() db_logger.info("===== Database tables setup. 
=====") diff --git a/xcov19/infra/models.py b/xcov19/infra/models.py index 6d901db..f7dd11e 100644 --- a/xcov19/infra/models.py +++ b/xcov19/infra/models.py @@ -90,12 +90,14 @@ def pydantic_adapter(cls) -> TypeAdapter: return TypeAdapter(cls) +def generate_uuid() -> str: + return str(uuid.uuid4()) + + ### These tables map to the domain models for Patient class Patient(SQLModel, table=True): patient_id: str = Field( - sa_column=Column( - TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) - ), + sa_column=Column(TEXT, unique=True, primary_key=True, default=generate_uuid), allow_mutation=False, ) queries: Mapped[List["Query"]] = Relationship( @@ -109,9 +111,7 @@ class Query(SQLModel, table=True): """Every Query must have both a Patient and a Location.""" query_id: str = Field( - sa_column=Column( - TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) - ), + sa_column=Column(TEXT, unique=True, primary_key=True, default=generate_uuid), allow_mutation=False, ) query: str = Field(allow_mutation=False, sa_column=Column(Text)) @@ -128,9 +128,7 @@ class Location(SQLModel, table=True): Index("ix_location_composite_lat_lng", "latitude", "longitude", unique=True), ) location_id: str = Field( - sa_column=Column( - TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) - ), + sa_column=Column(TEXT, unique=True, primary_key=True, default=generate_uuid), allow_mutation=False, ) latitude: float = Field(sa_column=Column(Float)) @@ -148,9 +146,7 @@ class Location(SQLModel, table=True): ### These tables map to the domain models for Provider class Provider(SQLModel, table=True): provider_id: str = Field( - sa_column=Column( - TEXT, unique=True, primary_key=True, default=str(uuid.uuid4()) - ), + sa_column=Column(TEXT, unique=True, primary_key=True, default=generate_uuid), allow_mutation=False, ) name: str = Field( @@ -160,7 +156,7 @@ class Provider(SQLModel, table=True): geopoint: Annotated[ tuple, lambda geom: 
PointType.pydantic_adapter().validate_python(geom) ] = Field(sa_column=Column(PointType, nullable=False), allow_mutation=False) - contact: str = Field(sa_column=Column(NUMERIC, nullable=False)) + contact: int = Field(sa_column=Column(NUMERIC, nullable=False)) facility_type: str = Field(sa_column=Column(TEXT, nullable=False)) ownership_type: str = Field(sa_column=Column(TEXT, nullable=False)) specialties: List[str] = Field(sa_column=Column(JSON, nullable=False)) diff --git a/xcov19/tests/data/seed_db.py b/xcov19/tests/data/seed_db.py index fb38c7b..75162f6 100644 --- a/xcov19/tests/data/seed_db.py +++ b/xcov19/tests/data/seed_db.py @@ -9,7 +9,8 @@ async def seed_data(session: AsyncSessionWrapper): - """ + """Seeds database with initial data. + dummy GeoLocation: lat=0 lng=0