Compare commits


12 Commits

SHA1 Message Date
c7060c7ed3 Asset items 2023-08-27 23:13:24 +02:00
867a3c9733 gitignore 2023-08-27 14:52:54 +02:00
73f08f9cb0 Cleanup 2023-08-27 13:36:35 +02:00
7b16c2f606 Done 2023-08-27 13:35:29 +02:00
03c8aaa312 Cleanup 2023-08-27 11:29:12 +02:00
3c15f189f6 Companies 2023-08-27 11:22:45 +02:00
9700ff2607 Cleanup 2023-08-27 11:16:31 +02:00
14366bc1e1 Update makefile 2023-08-27 11:14:19 +02:00
eb37f09a44 Working version 2023-08-27 11:08:14 +02:00
9faca36ce1 Working version 2023-08-27 00:03:13 +02:00
7f1acec1af Working version 2023-08-26 14:38:33 +02:00
f4882cfb0c Initial 2023-08-26 12:46:19 +02:00
28 changed files with 1461 additions and 9 deletions

2 .gitignore vendored

@@ -2,3 +2,5 @@
/.idea
/.vscode
/.venv
__pycache__

Makefile

@@ -8,8 +8,7 @@ run:
@ $(RUN_IN_ENV) uvicorn \
main:app \
--reload \
--reload-dir=service \
--lifespan=off
--reload-dir=app
shell:
@ $(RUN_IN_ENV) python manage.py shell

4 README.md Normal file

@@ -0,0 +1,4 @@
OpenAPI interface: http://localhost:8000/schema/swagger
Example project: https://github.com/litestar-org/litestar-pg-redis-docker/tree/main
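A quick way to smoke-check the Swagger UI mentioned above from Python once the server is running locally (a sketch; it only checks that the endpoint answers):

from urllib.request import urlopen

# expects `make run` (uvicorn) to already be serving on port 8000
with urlopen("http://localhost:8000/schema/swagger") as resp:
    print(resp.status)  # 200 when the OpenAPI UI is up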

app/controllers/__init__.py

@@ -0,0 +1,30 @@
from litestar import Router
from app.controllers.asset_item import AssetItemController
from app.controllers.company import CompanyController
from app.controllers.fiscal_payment_mapping import FiscalPaymentMappingController
from app.controllers.machine import MachineController
from app.domain.asset_item import AssetItem
from app.domain.company import Company
from app.domain.fiscal_payment_mapping import FiscalPaymentMapping
from app.domain.machine import Machine
__all__ = ["create_router"]
def create_router() -> Router:
return Router(
path="/v1",
route_handlers=[
CompanyController,
MachineController,
FiscalPaymentMappingController,
AssetItemController,
],
signature_namespace={
"Company": Company,
"Machine": Machine,
"FiscalPaymentMapping": FiscalPaymentMapping,
"AssetItem": AssetItem,
},
)

app/controllers/asset_item.py

@@ -0,0 +1,83 @@
from typing import TYPE_CHECKING, Optional
from litestar import Controller, get, post
from litestar.contrib.repository.filters import LimitOffset, SearchFilter
from litestar.di import Provide
from sqlalchemy.ext.asyncio import AsyncSession
from app.domain.company import Company
from app.domain.asset_item import (
AssetItem,
AssetItemReadDTO,
AssetItemWriteDTO,
Repository,
Service,
)
from app.lib.responses import ObjectListResponse, ObjectResponse
if TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncSession
DETAIL_ROUTE = "/{asset_item_id:int}"
async def provides_service(db_session: AsyncSession, company_id: int) -> Service:
"""Constructs repository and service objects for the request."""
from app.controllers.company import provides_service
company_service = provides_service(db_session)
company = await company_service.get(company_id)
return Service(Repository(session=db_session, company=company))
async def get_company(db_session: AsyncSession, company_id: int) -> Company:
from app.controllers.company import provides_service
company_service = provides_service(db_session)
return await company_service.get(company_id)
class AssetItemController(Controller):
dto = AssetItemWriteDTO
return_dto = AssetItemReadDTO
path = "/company/{company_id:int}/asset-items"
dependencies = {
"service": Provide(provides_service, sync_to_thread=False),
}
tags = ["AssetItems"]
@post()
async def create_asset_item(
self, data: AssetItem, service: Service
) -> AssetItem:
return await service.create(data)
@get()
async def get_asset_items(
self,
service: Service,
search: Optional[str] = None,
) -> ObjectListResponse[AssetItem]:
filters = [
LimitOffset(limit=20, offset=0),
]
if search is not None:
filters.append(
SearchFilter(
field_name="caption",
value=search,
),
)
content = await service.list(*filters)
return ObjectListResponse(content=content)
@get(DETAIL_ROUTE)
async def get_asset_item(
self, service: Service, asset_item_id: int
) -> ObjectResponse[AssetItem]:
content = await service.get(asset_item_id)
return ObjectResponse(content=content)
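For orientation, a minimal client-side sketch of the routes this controller exposes, using Litestar's test client; the company and asset-item IDs are hypothetical, and the Postgres instance configured in app/lib/sqlalchemy_plugin.py is assumed to be reachable:

from litestar.testing import TestClient

from main import create_app

with TestClient(app=create_app()) as client:
    listing = client.get("/v1/company/1/asset-items", params={"search": "coffee"})
    single = client.get("/v1/company/1/asset-items/7")
    print(listing.status_code, single.status_code)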

app/controllers/company.py

@@ -0,0 +1,62 @@
from typing import TYPE_CHECKING, Optional
from litestar import Controller, get
from litestar.contrib.repository.filters import LimitOffset, SearchFilter
from litestar.di import Provide
from sqlalchemy.ext.asyncio import AsyncSession
from app.domain.company import (
Company,
CompanyReadDTO,
CompanyWriteDTO,
Repository,
Service,
)
from app.lib.responses import ObjectListResponse, ObjectResponse
if TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncSession
DETAIL_ROUTE = "/{company_id:int}"
def provides_service(db_session: AsyncSession) -> Service:
"""Constructs repository and service objects for the request."""
return Service(Repository(session=db_session))
class CompanyController(Controller):
dto = CompanyWriteDTO
return_dto = CompanyReadDTO
path = "/companies"
dependencies = {
"service": Provide(provides_service, sync_to_thread=False),
}
tags = ["Companies"]
@get()
async def get_companies(
self, service: Service, search: Optional[str] = None
) -> ObjectListResponse[Company]:
filters = [
LimitOffset(limit=20, offset=0),
]
if search is not None:
filters.append(
SearchFilter(
field_name="caption",
value=search,
),
)
content = await service.list(*filters)
return ObjectListResponse(content=content)
@get(DETAIL_ROUTE)
async def get_company(
self, service: Service, company_id: int
) -> ObjectResponse[Company]:
content = await service.get(company_id)
return ObjectResponse(content=content)

app/controllers/fiscal_payment_mapping.py

@@ -0,0 +1,66 @@
from typing import TYPE_CHECKING, Optional
from litestar import Controller, get, post
from litestar.di import Provide
from sqlalchemy.ext.asyncio import AsyncSession
from app.domain.fiscal_payment_mapping import (
FiscalPaymentMapping,
FiscalPaymentMappingReadDTO,
FiscalPaymentMappingWriteDTO,
Repository,
Service,
)
from app.lib.filters import ExactFilter
from app.lib.responses import ObjectListResponse, ObjectResponse
if TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncSession
DETAIL_ROUTE = "/{id:int}"
def provides_service(db_session: AsyncSession) -> Service:
"""Constructs repository and service objects for the request."""
return Service(Repository(session=db_session))
class FiscalPaymentMappingController(Controller):
dto = FiscalPaymentMappingWriteDTO
return_dto = FiscalPaymentMappingReadDTO
path = "/fiscal-payment-mappings"
dependencies = {
"service": Provide(provides_service, sync_to_thread=False),
}
tags = ["FiscalPaymentMappings"]
@post()
async def create_fiscal_payment_mappings(
self, data: FiscalPaymentMapping, service: Service
) -> FiscalPaymentMapping:
return await service.create(data)
@get()
async def get_fiscal_payment_mappings(
self, service: Service, payment_device_code: Optional[int] = None
) -> ObjectListResponse[FiscalPaymentMapping]:
filters = []
if payment_device_code is not None:
filters.append(
ExactFilter[int](
field_name="payment_device_code",
value=payment_device_code,
),
)
content = await service.list(*filters)
return ObjectListResponse(content=content)
@get(DETAIL_ROUTE)
async def get_fiscal_payment_mapping(
self, service: Service, id: int
) -> ObjectResponse[FiscalPaymentMapping]:
content = await service.get(id)
return ObjectResponse(content=content)

app/controllers/machine.py

@@ -0,0 +1,76 @@
from typing import TYPE_CHECKING, Optional
from litestar import Controller, get
from litestar.contrib.repository.filters import LimitOffset, SearchFilter
from litestar.di import Provide
from sqlalchemy.ext.asyncio import AsyncSession
from app.domain.company import Company
from app.domain.machine import (
Machine,
MachineReadDTO,
MachineWriteDTO,
Repository,
Service,
)
from app.lib.responses import ObjectListResponse, ObjectResponse
if TYPE_CHECKING:
from sqlalchemy.ext.asyncio import AsyncSession
DETAIL_ROUTE = "/{machine_id:int}"
async def provides_service(db_session: AsyncSession, company_id: int) -> Service:
"""Constructs repository and service objects for the request."""
from app.controllers.company import provides_service
company_service = provides_service(db_session)
company = await company_service.get(company_id)
return Service(Repository(session=db_session, company=company))
async def get_company(db_session: AsyncSession, company_id: int) -> Company:
from app.controllers.company import provides_service
company_service = provides_service(db_session)
return await company_service.get(company_id)
class MachineController(Controller):
dto = MachineWriteDTO
return_dto = MachineReadDTO
path = "/company/{company_id:int}/machines"
dependencies = {
"service": Provide(provides_service, sync_to_thread=False),
}
tags = ["Machines"]
@get()
async def get_machines(
self,
service: Service,
search: Optional[str] = None,
) -> ObjectListResponse[Machine]:
filters = [
LimitOffset(limit=20, offset=0),
]
if search is not None:
filters.append(
SearchFilter(
field_name="caption",
value=search,
),
)
content = await service.list(*filters)
return ObjectListResponse(content=content)
@get(DETAIL_ROUTE)
async def get_machine(
self, service: Service, machine_id: int
) -> ObjectResponse[Machine]:
content = await service.get(machine_id)
return ObjectResponse(content=content)

0 app/domain/__init__.py Normal file

79 app/domain/asset_item.py Normal file

@@ -0,0 +1,79 @@
from __future__ import annotations
from datetime import datetime
from typing import Annotated, Optional
import sqlalchemy
from litestar.contrib.sqlalchemy.base import BigIntBase
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
from litestar.dto import DTOConfig, MsgspecDTO
from msgspec import Struct, Meta
from sqlalchemy.orm import Mapped, mapped_column
from app.domain.enums import AssetItemProductLineEnum, AssetItemStatusEnum
from app.lib import service
from app.lib.company_owned_repository import CompanyOwnedRepository
class AssetItem(BigIntBase):
__tablename__ = "asset_items" # type: ignore[assignment]
company_id: Mapped[int]
product_line: Mapped[AssetItemProductLineEnum] = mapped_column(
sqlalchemy.Enum(AssetItemProductLineEnum, name="asset_product_line_enum")
)
brand_id: Mapped[int]
model_id: Mapped[int]
serial_number: Mapped[str]
external_id: Mapped[Optional[str]]
alive: Mapped[bool]
status: Mapped[AssetItemStatusEnum] = mapped_column(
sqlalchemy.Enum(AssetItemStatusEnum, name="asset_status_enum")
)
created_by_id: Mapped[Optional[int]]
created_at: Mapped[datetime]
last_modified_by_id: Mapped[Optional[int]]
last_modified_at: Mapped[datetime]
is_fiscal_device: Mapped[bool]
warehouse_id: Mapped[Optional[int]]
PositiveInt = Annotated[int, Meta(gt=0)]
class AssetItemWriteStruct(Struct):
company_id: PositiveInt
product_line: AssetItemProductLineEnum
brand_id: PositiveInt
model_id: PositiveInt
serial_number: Annotated[str, Meta(max_length=10)]
external_id: Annotated[str, Meta(max_length=10)] | None
alive: bool
status: AssetItemStatusEnum
created_by_id: PositiveInt | None
created_at: datetime
last_modified_by_id: PositiveInt | None
last_modified_at: datetime | None
is_fiscal_device: bool
warehouse_id: PositiveInt | None
class XXAssetItemWriteDTO(MsgspecDTO[AssetItemWriteStruct]):
...
class Repository(CompanyOwnedRepository[AssetItem]):
model_type = AssetItem
alive_flag = "alive"
company_id_field = "company_id"
class Service(service.Service[AssetItem]):
repository_type = Repository
write_config = DTOConfig(exclude={"id"})
AssetItemWriteDTO = SQLAlchemyDTO[Annotated[AssetItem, write_config]]
# AssetItemWriteDTO = MsgspecDTO[AssetItemWriteStruct]
AssetItemReadDTO = SQLAlchemyDTO[AssetItem]
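The Annotated/Meta pattern used for AssetItemWriteStruct is enforced by msgspec at decode time; a standalone sketch with a hypothetical mini-struct (not part of this diff) illustrates the behaviour:

from typing import Annotated

import msgspec
from msgspec import Meta, Struct

PositiveInt = Annotated[int, Meta(gt=0)]

class Demo(Struct):
    brand_id: PositiveInt
    serial_number: Annotated[str, Meta(max_length=10)]

ok = msgspec.json.decode(b'{"brand_id": 1, "serial_number": "SN-1"}', type=Demo)
try:
    msgspec.json.decode(b'{"brand_id": 0, "serial_number": "SN-1"}', type=Demo)
except msgspec.ValidationError as exc:
    print(exc)  # constraint violation reported for $.brand_id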

36 app/domain/company.py Normal file

@@ -0,0 +1,36 @@
from typing import Annotated
from litestar.contrib.sqlalchemy.base import BigIntBase
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
from litestar.dto import DTOConfig
from sqlalchemy.orm import Mapped
from app.lib import service
from app.lib.filter_repository import FilterRepository
class Company(BigIntBase):
__tablename__ = "vending_companies" # type: ignore[assignment]
caption: Mapped[str]
address: Mapped[str]
city: Mapped[str]
phone: Mapped[str]
enabled: Mapped[str]
country_code: Mapped[str]
external_id: Mapped[str]
alive: Mapped[bool]
class Repository(FilterRepository[Company]):
model_type = Company
alive_flag = "alive"
class Service(service.Service[Company]):
repository_type = Repository
write_config = DTOConfig(exclude={"id"})
CompanyWriteDTO = SQLAlchemyDTO[Annotated[Company, write_config]]
CompanyReadDTO = SQLAlchemyDTO[Company]

43 app/domain/enums.py Normal file

@@ -0,0 +1,43 @@
from enum import Enum
class FiscalModuleEnum(str, Enum):
CROATIA = "CROATIA"
HUNGARY = "HUNGARY"
ITALY = "ITALY"
MONTENEGRO = "MONTENEGRO"
ROMANIA = "ROMANIA"
RUSSIA = "RUSSIA"
SERBIA = "SERBIA"
class PaymentTypeEnum(str, Enum):
CA = "CA"
DA = "DA"
DB = "DB"
DC = "DC"
DD = "DD"
PA4 = "PA4"
NEG = "NEG"
PA3 = "PA3"
TA = "TA"
WLT = "WLT"
class AssetItemProductLineEnum(str, Enum):
VENDING_MACHINE = "VENDING_MACHINE"
HORECA_MACHINE = "HORECA_MACHINE"
PROFESSIONAL_COFFEE_MACHINE = "PROFESSIONAL_COFFEE_MACHINE"
TELEMETRY_DEVICE = "TELEMETRY_DEVICE"
COIN_CHANGER = "COIN_CHANGER"
CASHLESS_PAYMENT_DEVICE = "CASHLESS_PAYMENT_DEVICE"
BANKNOTE_ACCEPTOR = "BANKNOTE_ACCEPTOR"
BOILER = "BOILER"
STEAMER = "STEAMER"
class AssetItemStatusEnum(str, Enum):
AVAILABLE = "AVAILABLE"
IN_USE = "IN_USE"
REPARATION = "REPARATION"
DISPOSED = "DISPOSED"

app/domain/fiscal_payment_mapping.py

@@ -0,0 +1,40 @@
from typing import Annotated, Optional
import sqlalchemy
from litestar.contrib.sqlalchemy.base import BigIntBase
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
from litestar.dto import DTOConfig
from sqlalchemy.orm import Mapped, mapped_column
from app.domain.enums import FiscalModuleEnum, PaymentTypeEnum
from app.lib import service
from app.lib.filter_repository import FilterRepository
class FiscalPaymentMapping(BigIntBase):
__tablename__ = "fiscal_payment_mapping" # type: ignore[assignment]
fiscal_module: Mapped[FiscalModuleEnum] = mapped_column(
sqlalchemy.Enum(FiscalModuleEnum, name="fiscal_module_enum")
)
code: Mapped[str]
payment_type: Mapped[PaymentTypeEnum] = mapped_column(
sqlalchemy.Enum(PaymentTypeEnum, name="televend_payment_type")
)
operation_mode_code: Mapped[int]
payment_device_code: Mapped[Optional[int]]
class Repository(FilterRepository[FiscalPaymentMapping]):
model_type = FiscalPaymentMapping
class Service(service.Service[FiscalPaymentMapping]):
repository_type = Repository
write_config = DTOConfig(exclude={"id"})
FiscalPaymentMappingWriteDTO = SQLAlchemyDTO[
Annotated[FiscalPaymentMapping, write_config]
]
FiscalPaymentMappingReadDTO = SQLAlchemyDTO[FiscalPaymentMapping]

35 app/domain/machine.py Normal file

@@ -0,0 +1,35 @@
from typing import Annotated
from litestar.contrib.sqlalchemy.base import BigIntBase
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
from litestar.dto import DTOConfig
from sqlalchemy.orm import Mapped
from app.lib import service
from app.lib.company_owned_repository import CompanyOwnedRepository
class Machine(BigIntBase):
__tablename__ = "machines" # type: ignore[assignment]
caption: Mapped[str]
enabled: Mapped[bool]
alive: Mapped[bool]
deleted: Mapped[bool]
external_id: Mapped[str]
owner_id: Mapped[int]
class Repository(CompanyOwnedRepository[Machine]):
model_type = Machine
alive_flag = "alive"
company_id_field = "owner_id"
class Service(service.Service[Machine]):
repository_type = Repository
write_config = DTOConfig(exclude={"id"})
MachineWriteDTO = SQLAlchemyDTO[Annotated[Machine, write_config]]
MachineReadDTO = SQLAlchemyDTO[Machine]

0 app/dto/__init__.py Normal file

0 app/lib/__init__.py Normal file

app/lib/company_owned_repository.py

@@ -0,0 +1,89 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from litestar.contrib.repository import FilterTypes
from litestar.contrib.sqlalchemy.repository import ModelT
from litestar.contrib.sqlalchemy.repository.types import SelectT
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import InstrumentedAttribute
from app.lib.filter_repository import FilterRepository
if TYPE_CHECKING:
from app.domain.company import Company
class CompanyOwnedRepository(FilterRepository[ModelT]):
company_id_field: str | None = None
def __init__(
self,
*,
company: Company,
statement: SelectT | None = None,
session: AsyncSession,
auto_expunge: bool = False,
auto_refresh: bool = True,
auto_commit: bool = False,
**kwargs: Any,
) -> None:
self.company = company
super().__init__(
statement=statement,
session=session,
auto_expunge=auto_expunge,
auto_refresh=auto_refresh,
auto_commit=auto_commit,
**kwargs,
)
def _get_company_filter_statement(self, statement: SelectT | None) -> SelectT:
if not self.company_id_field:
raise AttributeError(
f"company_id_field must be set for {self.__class__.__name__}"
)
column = self._get_column_by_name(self.company_id_field)
if column is None:
raise AttributeError(
f"column {self.company_id_field} not found in {self.__class__.__name__}"
)
stmt = statement if statement is not None else self.statement
return stmt.where(column == self.company.id)
def _apply_filters(
self, *filters: FilterTypes, apply_pagination: bool = True, statement: SelectT
) -> SelectT:
if not self.company_id_field:
raise AttributeError(
f"company_id_field must be set for {self.__class__.__name__}"
)
statement = super()._apply_filters(
*filters, apply_pagination=apply_pagination, statement=statement
)
statement = statement.where(
self._get_column_by_name(self.company_id_field) == self.company.id
)
return statement
async def get( # type: ignore[override]
self,
item_id: Any,
auto_expunge: bool | None = None,
statement: SelectT | None = None,
id_attribute: str | InstrumentedAttribute | None = None,
) -> ModelT:
statement = self._get_company_filter_statement(statement)
return await super().get(
item_id=item_id,
auto_expunge=auto_expunge,
statement=statement,
id_attribute=id_attribute,
)
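In short, every query issued through a CompanyOwnedRepository is additionally constrained to the owning company. A usage sketch (db_session and company are assumed to come from the DI wiring shown in the controllers):

from litestar.contrib.repository.filters import LimitOffset
from sqlalchemy.ext.asyncio import AsyncSession

from app.domain.asset_item import Repository
from app.domain.company import Company

async def list_company_assets(db_session: AsyncSession, company: Company):
    # gains "WHERE asset_items.company_id = <company.id>" on top of the usual filters
    repo = Repository(session=db_session, company=company)
    return await repo.list(LimitOffset(limit=20, offset=0))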

150 app/lib/dependencies.py Normal file

@@ -0,0 +1,150 @@
from datetime import datetime
from uuid import UUID
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, FilterTypes, LimitOffset
from litestar.di import Provide
from litestar.params import Dependency, Parameter
DEFAULT_PAGINATION_LIMIT = 20
__all__ = [
"create_collection_dependencies",
"provide_created_filter",
"provide_filter_dependencies",
"provide_id_filter",
"provide_limit_offset_pagination",
"provide_updated_filter",
]
DTorNone = datetime | None
CREATED_FILTER_DEPENDENCY_KEY = "created_filter"
FILTERS_DEPENDENCY_KEY = "filters"
ID_FILTER_DEPENDENCY_KEY = "id_filter"
LIMIT_OFFSET_DEPENDENCY_KEY = "limit_offset"
UPDATED_FILTER_DEPENDENCY_KEY = "updated_filter"
def provide_id_filter(
ids: list[UUID] | None = Parameter(query="ids", default=None, required=False)
) -> CollectionFilter[UUID]:
"""Return type consumed by ``Repository.filter_in_collection()``.
Parameters
----------
ids : list[UUID] | None
Parsed out of comma separated list of values in query params.
Returns:
-------
CollectionFilter[UUID]
"""
return CollectionFilter(field_name="id", values=ids or [])
def provide_created_filter(
before: DTorNone = Parameter(query="created-before", default=None, required=False),
after: DTorNone = Parameter(query="created-after", default=None, required=False),
) -> BeforeAfter:
"""Return type consumed by `Repository.filter_on_datetime_field()`.
Parameters
----------
before : datetime | None
Filter for records created before this date/time.
after : datetime | None
Filter for records created after this date/time.
"""
return BeforeAfter("created_at", before, after)
def provide_updated_filter(
before: DTorNone = Parameter(query="updated-before", default=None, required=False),
after: DTorNone = Parameter(query="updated-after", default=None, required=False),
) -> BeforeAfter:
"""Return type consumed by `Repository.filter_on_datetime_field()`.
Parameters
----------
before : datetime | None
Filter for records updated before this date/time.
after : datetime | None
Filter for records updated after this date/time.
"""
return BeforeAfter("updated_at", before, after)
def provide_limit_offset_pagination(
page: int = Parameter(ge=1, default=1, required=False),
page_size: int = Parameter(
query="page-size",
ge=1,
default=DEFAULT_PAGINATION_LIMIT,
required=False,
),
) -> LimitOffset:
"""Return type consumed by `Repository.apply_limit_offset_pagination()`.
Parameters
----------
page : int
Page number, starting at 1; used to derive the OFFSET applied to the select.
page_size : int
Number of records per page; used as the LIMIT applied to the select.
"""
return LimitOffset(page_size, page_size * (page - 1))
def provide_filter_dependencies(
created_filter: BeforeAfter = Dependency(skip_validation=True),
updated_filter: BeforeAfter = Dependency(skip_validation=True),
id_filter: CollectionFilter = Dependency(skip_validation=True),
limit_offset: LimitOffset = Dependency(skip_validation=True),
) -> list[FilterTypes]:
"""Common collection route filtering dependencies. Add all filters to any
route by including this function as a dependency, e.g:
@get
def get_collection_handler(filters: Filters) -> ...:
...
The dependency is provided at the application layer, so only need to inject the dependency where
necessary.
Parameters
----------
id_filter : repository.CollectionFilter
Filter for scoping query to limited set of identities.
created_filter : repository.BeforeAfter
Filter for scoping query to instance creation date/time.
updated_filter : repository.BeforeAfter
Filter for scoping query to instance update date/time.
limit_offset : repository.LimitOffset
Filter for query pagination.
Returns:
-------
list[FilterTypes]
List of filters parsed from connection.
"""
return [
created_filter,
id_filter,
limit_offset,
updated_filter,
]
def create_collection_dependencies() -> dict[str, Provide]:
"""Creates a dictionary of provides for pagination endpoints.
Returns:
-------
dict[str, Provide]
"""
return {
LIMIT_OFFSET_DEPENDENCY_KEY: Provide(provide_limit_offset_pagination, sync_to_thread=False),
UPDATED_FILTER_DEPENDENCY_KEY: Provide(provide_updated_filter, sync_to_thread=False),
CREATED_FILTER_DEPENDENCY_KEY: Provide(provide_created_filter, sync_to_thread=False),
ID_FILTER_DEPENDENCY_KEY: Provide(provide_id_filter, sync_to_thread=False),
FILTERS_DEPENDENCY_KEY: Provide(provide_filter_dependencies, sync_to_thread=False),
}
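The pagination provider maps the 1-based page number onto LIMIT/OFFSET; a small sketch of the arithmetic it performs:

from litestar.contrib.repository.filters import LimitOffset

def page_to_limit_offset(page: int, page_size: int = 20) -> LimitOffset:
    # page 1 -> LIMIT 20 OFFSET 0, page 3 -> LIMIT 20 OFFSET 40
    return LimitOffset(page_size, page_size * (page - 1))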

68 app/lib/exceptions.py Normal file

@@ -0,0 +1,68 @@
import logging
from typing import TYPE_CHECKING
from litestar.contrib.repository.exceptions import (
ConflictError as RepositoryConflictException,
)
from litestar.contrib.repository.exceptions import (
NotFoundError as RepositoryNotFoundException,
)
from litestar.contrib.repository.exceptions import (
RepositoryError as RepositoryException,
)
from litestar.exceptions import (
HTTPException,
InternalServerException,
NotFoundException,
)
from litestar.middleware.exceptions.middleware import create_exception_response
from .service import ServiceError
if TYPE_CHECKING:
from litestar.connection import Request
from litestar.response import Response
__all__ = [
"repository_exception_to_http_response",
"service_exception_to_http_response",
]
logger = logging.getLogger(__name__)
class ConflictException(HTTPException):
status_code = 409
def repository_exception_to_http_response(request: "Request", exc: RepositoryException) -> "Response":
"""Transform repository exceptions to HTTP exceptions.
Args:
request: The request that experienced the exception.
exc: Exception raised during handling of the request.
Returns:
Exception response appropriate to the type of original exception.
"""
http_exc: type[HTTPException]
if isinstance(exc, RepositoryNotFoundException):
http_exc = NotFoundException
elif isinstance(exc, RepositoryConflictException):
http_exc = ConflictException
else:
http_exc = InternalServerException
return create_exception_response(request, exc=http_exc())
def service_exception_to_http_response(request: "Request", exc: ServiceError) -> "Response":
"""Transform service exceptions to HTTP exceptions.
Args:
request: The request that experienced the exception.
exc: Exception raised during handling of the request.
Returns:
Exception response appropriate to the type of original exception.
"""
return create_exception_response(request, InternalServerException())

app/lib/filter_repository.py

@@ -0,0 +1,38 @@
from typing import Optional, cast
from litestar.contrib.repository import FilterTypes
from litestar.contrib.sqlalchemy.repository import ModelT, SQLAlchemyAsyncRepository
from litestar.contrib.sqlalchemy.repository.types import SelectT
from sqlalchemy import true, Column
from app.lib.filters import ExactFilter
class FilterRepository(SQLAlchemyAsyncRepository[ModelT]):
alive_flag: Optional[str] = None
def _get_column_by_name(self, name: str) -> Column | None:
return cast(Column, getattr(self.model_type, name, None))
def _apply_filters(
self, *filters: FilterTypes, apply_pagination: bool = True, statement: SelectT
) -> SelectT:
standard_filters = []
for filter_ in filters:
if isinstance(filter_, ExactFilter):
statement = statement.where(
self._get_column_by_name(filter_.field_name) == filter_.value
)
else:
standard_filters.append(filter_)
statement = super()._apply_filters(
*standard_filters, apply_pagination=apply_pagination, statement=statement
)
if self.alive_flag:
statement = statement.where(
self._get_column_by_name(self.alive_flag) == true()
)
return statement

10 app/lib/filters.py Normal file

@@ -0,0 +1,10 @@
from dataclasses import dataclass
from typing import Generic, TypeVar
T = TypeVar("T")
@dataclass
class ExactFilter(Generic[T]):
field_name: str
value: T
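ExactFilter is this project's own filter type rather than one of Litestar's built-ins; FilterRepository above special-cases it into a plain equality WHERE clause. A usage sketch (db_session is assumed to come from the SQLAlchemy plugin's dependency):

from sqlalchemy.ext.asyncio import AsyncSession

from app.domain.fiscal_payment_mapping import Repository
from app.lib.filters import ExactFilter

async def mappings_for_device(db_session: AsyncSession, device_code: int):
    repo = Repository(session=db_session)
    # translated into "WHERE fiscal_payment_mapping.payment_device_code = <device_code>"
    return await repo.list(
        ExactFilter[int](field_name="payment_device_code", value=device_code)
    )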

26 app/lib/responses.py Normal file

@@ -0,0 +1,26 @@
from dataclasses import dataclass
from typing import Generic, TypeVar
T = TypeVar("T")
@dataclass
class ObjectResponse(Generic[T]):
content: T
@dataclass
class ObjectListResponse(Generic[T]):
content: list[T]
@dataclass
class PaginationMeta:
page: int
page_count: int
@dataclass
class PaginatedObjectListResponse(Generic[T]):
content: list[T]
meta: PaginationMeta

95 app/lib/service.py Normal file

@@ -0,0 +1,95 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Generic
from litestar.contrib.sqlalchemy.repository import ModelT
__all__ = ["Service", "ServiceError"]
if TYPE_CHECKING:
from litestar.contrib.repository import AbstractAsyncRepository, FilterTypes
class ServiceError(Exception):
"""Base class for `Service` related exceptions."""
class Service(Generic[ModelT]):
def __init__(self, repository: AbstractAsyncRepository[ModelT]) -> None:
"""Generic Service object.
Args:
repository: Instance conforming to `AbstractRepository` interface.
"""
self.repository = repository
async def create(self, data: ModelT) -> ModelT:
"""Wraps repository instance creation.
Args:
data: Representation to be created.
Returns:
Representation of created instance.
"""
return await self.repository.add(data)
async def list(self, *filters: FilterTypes, **kwargs: Any) -> list[ModelT]:
"""Wraps repository scalars operation.
Args:
*filters: Collection route filters.
**kwargs: Keyword arguments for attribute based filtering.
Returns:
The list of instances retrieved from the repository.
"""
return await self.repository.list(*filters, **kwargs)
async def update(self, id_: Any, data: ModelT) -> ModelT:
"""Wraps repository update operation.
Args:
id_: Identifier of item to be updated.
data: Representation to be updated.
Returns:
Updated representation.
"""
return await self.repository.update(data)
async def upsert(self, id_: Any, data: ModelT) -> ModelT:
"""Wraps repository upsert operation.
Args:
id_: Identifier of the object for upsert.
data: Representation for upsert.
Returns:
-------
Updated or created representation.
"""
return await self.repository.upsert(data)
async def get(self, id_: Any) -> ModelT:
"""Wraps repository scalar operation.
Args:
id_: Identifier of instance to be retrieved.
Returns:
Representation of instance with identifier `id_`.
"""
return await self.repository.get(id_)
async def delete(self, id_: Any) -> ModelT:
"""Wraps repository delete operation.
Args:
id_: Identifier of instance to be deleted.
Returns:
Representation of the deleted instance.
"""
return await self.repository.delete(id_)

app/lib/sqlalchemy_plugin.py

@@ -0,0 +1,143 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import TYPE_CHECKING, cast, Literal
from uuid import UUID
import msgspec
from litestar.contrib.sqlalchemy.plugins.init import SQLAlchemyInitPlugin
from litestar.contrib.sqlalchemy.plugins.init.config import SQLAlchemyAsyncConfig
from litestar.contrib.sqlalchemy.plugins.init.config.common import (
SESSION_SCOPE_KEY,
SESSION_TERMINUS_ASGI_EVENTS,
)
from litestar.utils import delete_litestar_scope_state, get_litestar_scope_state
from sqlalchemy import event
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import NullPool
@dataclass
class DatabaseSettings:
URL: str = "postgresql+asyncpg://televend:televend@localhost:5433/televend"
ECHO: bool = True
ECHO_POOL: bool | Literal["debug"] = False
POOL_DISABLE: bool = False
POOL_MAX_OVERFLOW: int = 10
POOL_SIZE: int = 5
POOL_TIMEOUT: int = 30
DB_SESSION_DEPENDENCY_KEY: str = "db_session"
settings = DatabaseSettings()
if TYPE_CHECKING:
from typing import Any
from litestar.types.asgi_types import Message, Scope
__all__ = [
"async_session_factory",
"config",
"engine",
"plugin",
]
def _default(val: Any) -> str:
if isinstance(val, UUID):
return str(val)
raise TypeError()
engine = create_async_engine(
settings.URL,
echo=settings.ECHO,
echo_pool=settings.ECHO_POOL,
json_serializer=msgspec.json.Encoder(enc_hook=_default),
max_overflow=settings.POOL_MAX_OVERFLOW,
pool_size=settings.POOL_SIZE,
pool_timeout=settings.POOL_TIMEOUT,
poolclass=NullPool if settings.POOL_DISABLE else None,
)
"""Configure via DatabaseSettings.
Overrides default JSON
serializer to use `msgspec`. See [`create_async_engine()`][sqlalchemy.ext.asyncio.create_async_engine]
for detailed instructions.
"""
async_session_factory = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
"""Database session factory.
See [`async_sessionmaker()`][sqlalchemy.ext.asyncio.async_sessionmaker].
"""
@event.listens_for(engine.sync_engine, "connect")
def _sqla_on_connect(dbapi_connection: Any, _: Any) -> Any:
"""Using orjson for serialization of the json column values means that the
output is binary, not `str` like `json.dumps` would output.
SQLAlchemy expects that the json serializer returns `str` and calls
`.encode()` on the value to turn it to bytes before writing to the
JSONB column. I'd need to either wrap `orjson.dumps` to return a
`str` so that SQLAlchemy could then convert it to binary, or do the
following, which changes the behaviour of the dialect to expect a
binary value from the serializer.
See Also:
https://github.com/sqlalchemy/sqlalchemy/blob/14bfbadfdf9260a1c40f63b31641b27fe9de12a0/lib/sqlalchemy/dialects/postgresql/asyncpg.py#L934
"""
def encoder(bin_value: bytes) -> bytes:
# \x01 is the prefix for jsonb used by PostgreSQL.
# asyncpg requires it when format='binary'
return b"\x01" + bin_value
def decoder(bin_value: bytes) -> Any:
# the byte is the \x01 prefix for jsonb used by PostgreSQL.
# asyncpg returns it when format='binary'
return msgspec.json.decode(bin_value[1:])
dbapi_connection.await_(
dbapi_connection.driver_connection.set_type_codec(
"jsonb",
encoder=encoder,
decoder=decoder,
schema="pg_catalog",
format="binary",
)
)
async def before_send_handler(message: Message, scope: Scope) -> None:
"""Custom `before_send_handler` for SQLAlchemy plugin that inspects the
status of response and commits, or rolls back the database.
Args:
message: ASGI message
scope: ASGI scope
"""
session = cast("AsyncSession | None", get_litestar_scope_state(scope, SESSION_SCOPE_KEY))
try:
if session is not None and message["type"] == "http.response.start":
if 200 <= message["status"] < 300:
await session.commit()
else:
await session.rollback()
finally:
if session is not None and message["type"] in SESSION_TERMINUS_ASGI_EVENTS:
await session.close()
delete_litestar_scope_state(scope, SESSION_SCOPE_KEY)
config = SQLAlchemyAsyncConfig(
session_dependency_key=settings.DB_SESSION_DEPENDENCY_KEY,
engine_instance=engine,
session_maker=async_session_factory,
before_send_handler=before_send_handler,
)
plugin = SQLAlchemyInitPlugin(config=config)
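To make the \x01 framing concrete, a standalone round-trip sketch of the jsonb codec installed by the connect hook (no database required):

import msgspec

def encoder(bin_value: bytes) -> bytes:
    return b"\x01" + bin_value          # asyncpg's binary jsonb framing

def decoder(bin_value: bytes) -> object:
    return msgspec.json.decode(bin_value[1:])

payload = msgspec.json.encode({"machine_id": 42})
assert decoder(encoder(payload)) == {"machine_id": 42}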

31 main.py

@@ -1,9 +1,28 @@
from typing import Any
from litestar import Litestar, get
from litestar.contrib.repository.exceptions import (
RepositoryError as RepositoryException,
)
from litestar.openapi import OpenAPIConfig
from app.controllers import create_router
from app.lib import exceptions, sqlalchemy_plugin
from app.lib.service import ServiceError
def create_app(**kwargs: Any) -> Litestar:
return Litestar(
route_handlers=[create_router()],
openapi_config=OpenAPIConfig(title="My API", version="1.0.0"),
# dependencies={"session": provide_transaction},
plugins=[sqlalchemy_plugin.plugin],
exception_handlers={
RepositoryException: exceptions.repository_exception_to_http_response, # type: ignore[dict-item]
ServiceError: exceptions.service_exception_to_http_response, # type: ignore[dict-item]
},
debug=True,
**kwargs,
)
@get("/")
async def hello_world() -> str:
return "Hello, world!"
app = Litestar([hello_world])
app = create_app()
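For local runs outside the Makefile, an equivalent programmatic entry point (a sketch; reload_dirs mirrors the --reload-dir=app flag from the Makefile hunk above):

import uvicorn

if __name__ == "__main__":
    # same effect as `make run`
    uvicorn.run("main:app", reload=True, reload_dirs=["app"])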

258 poetry.lock generated

@@ -20,6 +20,59 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-
test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (<0.22)"]
[[package]]
name = "asyncpg"
version = "0.28.0"
description = "An asyncio PostgreSQL driver"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "asyncpg-0.28.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a6d1b954d2b296292ddff4e0060f494bb4270d87fb3655dd23c5c6096d16d83"},
{file = "asyncpg-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0740f836985fd2bd73dca42c50c6074d1d61376e134d7ad3ad7566c4f79f8184"},
{file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e907cf620a819fab1737f2dd90c0f185e2a796f139ac7de6aa3212a8af96c050"},
{file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b339984d55e8202e0c4b252e9573e26e5afa05617ed02252544f7b3e6de3e9"},
{file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c402745185414e4c204a02daca3d22d732b37359db4d2e705172324e2d94e85"},
{file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c88eef5e096296626e9688f00ab627231f709d0e7e3fb84bb4413dff81d996d7"},
{file = "asyncpg-0.28.0-cp310-cp310-win32.whl", hash = "sha256:90a7bae882a9e65a9e448fdad3e090c2609bb4637d2a9c90bfdcebbfc334bf89"},
{file = "asyncpg-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:76aacdcd5e2e9999e83c8fbcb748208b60925cc714a578925adcb446d709016c"},
{file = "asyncpg-0.28.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0e08fe2c9b3618459caaef35979d45f4e4f8d4f79490c9fa3367251366af207"},
{file = "asyncpg-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b24e521f6060ff5d35f761a623b0042c84b9c9b9fb82786aadca95a9cb4a893b"},
{file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99417210461a41891c4ff301490a8713d1ca99b694fef05dabd7139f9d64bd6c"},
{file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f029c5adf08c47b10bcdc857001bbef551ae51c57b3110964844a9d79ca0f267"},
{file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d6abf6c2f5152f46fff06b0e74f25800ce8ec6c80967f0bc789974de3c652"},
{file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d7fa81ada2807bc50fea1dc741b26a4e99258825ba55913b0ddbf199a10d69d8"},
{file = "asyncpg-0.28.0-cp311-cp311-win32.whl", hash = "sha256:f33c5685e97821533df3ada9384e7784bd1e7865d2b22f153f2e4bd4a083e102"},
{file = "asyncpg-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5e7337c98fb493079d686a4a6965e8bcb059b8e1b8ec42106322fc6c1c889bb0"},
{file = "asyncpg-0.28.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1c56092465e718a9fdcc726cc3d9dcf3a692e4834031c9a9f871d92a75d20d48"},
{file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4acd6830a7da0eb4426249d71353e8895b350daae2380cb26d11e0d4a01c5472"},
{file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63861bb4a540fa033a56db3bb58b0c128c56fad5d24e6d0a8c37cb29b17c1c7d"},
{file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a93a94ae777c70772073d0512f21c74ac82a8a49be3a1d982e3f259ab5f27307"},
{file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d14681110e51a9bc9c065c4e7944e8139076a778e56d6f6a306a26e740ed86d2"},
{file = "asyncpg-0.28.0-cp37-cp37m-win32.whl", hash = "sha256:8aec08e7310f9ab322925ae5c768532e1d78cfb6440f63c078b8392a38aa636a"},
{file = "asyncpg-0.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:319f5fa1ab0432bc91fb39b3960b0d591e6b5c7844dafc92c79e3f1bff96abef"},
{file = "asyncpg-0.28.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b337ededaabc91c26bf577bfcd19b5508d879c0ad009722be5bb0a9dd30b85a0"},
{file = "asyncpg-0.28.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d32b680a9b16d2957a0a3cc6b7fa39068baba8e6b728f2e0a148a67644578f4"},
{file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f62f04cdf38441a70f279505ef3b4eadf64479b17e707c950515846a2df197"},
{file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f20cac332c2576c79c2e8e6464791c1f1628416d1115935a34ddd7121bfc6a4"},
{file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59f9712ce01e146ff71d95d561fb68bd2d588a35a187116ef05028675462d5ed"},
{file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9e9f9ff1aa0eddcc3247a180ac9e9b51a62311e988809ac6152e8fb8097756"},
{file = "asyncpg-0.28.0-cp38-cp38-win32.whl", hash = "sha256:9e721dccd3838fcff66da98709ed884df1e30a95f6ba19f595a3706b4bc757e3"},
{file = "asyncpg-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ba7d06a0bea539e0487234511d4adf81dc8762249858ed2a580534e1720db00"},
{file = "asyncpg-0.28.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d009b08602b8b18edef3a731f2ce6d3f57d8dac2a0a4140367e194eabd3de457"},
{file = "asyncpg-0.28.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ec46a58d81446d580fb21b376ec6baecab7288ce5a578943e2fc7ab73bf7eb39"},
{file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b48ceed606cce9e64fd5480a9b0b9a95cea2b798bb95129687abd8599c8b019"},
{file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8858f713810f4fe67876728680f42e93b7e7d5c7b61cf2118ef9153ec16b9423"},
{file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e18438a0730d1c0c1715016eacda6e9a505fc5aa931b37c97d928d44941b4bf"},
{file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e9c433f6fcdd61c21a715ee9128a3ca48be8ac16fa07be69262f016bb0f4dbd2"},
{file = "asyncpg-0.28.0-cp39-cp39-win32.whl", hash = "sha256:41e97248d9076bc8e4849da9e33e051be7ba37cd507cbd51dfe4b2d99c70e3dc"},
{file = "asyncpg-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ed77f00c6aacfe9d79e9eff9e21729ce92a4b38e80ea99a58ed382f42ebd55b"},
{file = "asyncpg-0.28.0.tar.gz", hash = "sha256:7252cdc3acb2f52feaa3664280d3bcd78a46bd6c10bfd681acfffefa1120e278"},
]
[package.extras]
docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"]
[[package]]
name = "certifi"
version = "2023.7.22"
@@ -95,6 +148,79 @@ files = [
{file = "fast_query_parsers-1.0.3.tar.gz", hash = "sha256:5200a9e02997ad51d4d76a60ea1b256a68a184b04359540eb6310a15013df68f"},
]
[[package]]
name = "greenlet"
version = "2.0.2"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
files = [
{file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"},
{file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"},
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"},
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"},
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
{file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"},
{file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"},
{file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"},
{file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"},
{file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"},
{file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"},
{file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"},
{file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"},
{file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"},
{file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"},
{file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"},
{file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"},
{file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"},
{file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"},
{file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"},
{file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"},
{file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"},
{file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"},
{file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"},
{file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"},
{file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"},
{file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"},
{file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"},
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"},
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"},
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"},
{file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"},
{file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"},
{file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"},
{file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"},
{file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"},
]
[package.extras]
docs = ["Sphinx", "docutils (<0.18)"]
test = ["objgraph", "psutil"]
[[package]]
name = "h11"
version = "0.14.0"
@@ -353,6 +479,58 @@ msgspec = ["msgspec"]
odmantic = ["odmantic", "pydantic[email]"]
pydantic = ["pydantic[email]"]
[[package]]
name = "pydantic"
version = "1.10.12"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"},
{file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"},
{file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"},
{file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"},
{file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"},
{file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"},
{file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"},
{file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"},
{file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"},
{file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"},
{file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"},
{file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"},
{file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"},
{file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"},
{file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"},
{file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"},
{file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"},
{file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"},
{file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"},
{file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"},
{file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"},
{file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"},
{file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"},
{file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"},
{file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"},
{file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"},
{file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"},
{file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"},
{file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"},
{file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"},
{file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"},
{file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"},
{file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"},
{file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"},
{file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"},
{file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"},
]
[package.dependencies]
typing-extensions = ">=4.2.0"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
@@ -438,6 +616,84 @@ files = [
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
[[package]]
name = "sqlalchemy"
version = "2.0.20"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"},
{file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = "sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"},
{file = "SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"},
{file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"},
{file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = "sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"},
{file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = "sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"},
{file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"},
{file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
typing-extensions = ">=4.2.0"
[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx-oracle (>=7)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "typing-extensions"
version = "4.7.1"
@@ -504,4 +760,4 @@ anyio = ">=3.0.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "1de8a8d36d3dce9aac7472bc4997a2f51d623e809d94c20f30e538bb73562c67"
content-hash = "411f2e851f3c3fdf05d5a8f1c200b8ded6223b8e4bb9b1be00db2f6a28a8dc26"

pyproject.toml

@@ -10,6 +10,9 @@ packages = [{include = "litestar_machines"}]
python = "^3.11"
litestar = "^2.0.1"
uvicorn = "^0.23.2"
sqlalchemy = "^2.0.20"
asyncpg = "^0.28.0"
pydantic = "<2.0"
[tool.poetry.group.dev.dependencies]