Working version
This commit is contained in:
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
15
app/controllers/__init__.py
Normal file
15
app/controllers/__init__.py
Normal file
@ -0,0 +1,15 @@
|
||||
from litestar import Router
|
||||
|
||||
from app.domain.machine import Machine
|
||||
|
||||
from . import machines
|
||||
|
||||
__all__ = ["create_router"]
|
||||
|
||||
|
||||
def create_router() -> Router:
    """Build the versioned ``/v1`` API router with all controllers attached."""
    handlers = [machines.MachineController]
    # Expose ``Machine`` so Litestar can resolve forward-referenced
    # annotations in handler signatures.
    namespace = {"Machine": Machine}
    return Router(path="/v1", route_handlers=handlers, signature_namespace=namespace)
|
||||
113
app/controllers/machines.py
Normal file
113
app/controllers/machines.py
Normal file
@ -0,0 +1,113 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Optional, cast
|
||||
|
||||
from litestar import Controller, delete, get, post, put
|
||||
from litestar.di import Provide
|
||||
from litestar.pagination import (
|
||||
AbstractAsyncOffsetPaginator,
|
||||
AbstractSyncClassicPaginator,
|
||||
)
|
||||
from litestar.status_codes import HTTP_200_OK
|
||||
from sqlalchemy import ScalarResult, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.domain.machine import MachineReadDTO, MachineWriteDTO, Repository, Service
|
||||
from app.domain.machine import Machine
|
||||
|
||||
from litestar.contrib.repository.filters import SearchFilter, LimitOffset
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
|
||||
DETAIL_ROUTE = "/{machine_id:int}"
|
||||
|
||||
|
||||
def provides_service(db_session: AsyncSession) -> Service:
    """Construct a request-scoped ``Service`` wired to a fresh ``Repository``."""
    repository = Repository(session=db_session)
    return Service(repository)
|
||||
|
||||
|
||||
# class MachineOffsetPaginator(AbstractAsyncOffsetPaginator[Machine]):
|
||||
# def __init__(
|
||||
# self, async_session: AsyncSession
|
||||
# ) -> None: # 'async_session' dependency will be injected here.
|
||||
# self.async_session = async_session
|
||||
#
|
||||
# async def get_total(self) -> int:
|
||||
# return cast(
|
||||
# "int", await self.async_session.scalar(select(func.count(Machine.id)))
|
||||
# )
|
||||
#
|
||||
# async def get_items(self, limit: int, offset: int) -> list[Machine]:
|
||||
# people: ScalarResult = await self.async_session.scalars(
|
||||
# select(Machine).slice(offset, limit)
|
||||
# )
|
||||
# return list(people.all())
|
||||
|
||||
|
||||
class MachineController(Controller):
    """HTTP endpoints for the ``Machine`` resource, mounted at ``/machines``."""

    dto = MachineWriteDTO
    return_dto = MachineReadDTO
    path = "/machines"
    dependencies = {
        "service": Provide(provides_service, sync_to_thread=False),
    }
    tags = ["Machines"]

    @get()
    async def get_machines(
        self,
        service: Service,
        search: Optional[str] = None,
    ) -> list[Machine]:
        """Return the first page of machines, optionally filtered by caption.

        Args:
            service: Request-scoped machine service (injected).
            search: Optional substring matched against ``caption``.

        Returns:
            Up to 20 matching machines.
        """
        # NOTE(review): pagination is fixed to the first 20 rows; wire this to
        # query params (see provide_limit_offset_pagination) when needed.
        filters = [
            LimitOffset(limit=20, offset=0),
        ]
        if search:
            filters.append(
                SearchFilter(field_name="caption", value=search),
            )
        return await service.list(*filters)

    @get(DETAIL_ROUTE)
    async def get_machine(self, service: Service, machine_id: int) -> Machine:
        """Fetch a single machine by primary key.

        A repository not-found error propagates when no machine with
        ``machine_id`` exists (mapped to an HTTP error by the app's handlers).
        """
        return await service.get(machine_id)
|
||||
36
app/database.py
Normal file
36
app/database.py
Normal file
@ -0,0 +1,36 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from litestar.contrib.sqlalchemy.plugins import SQLAlchemyAsyncConfig
|
||||
from litestar.exceptions import ClientException
|
||||
from litestar.status_codes import HTTP_409_CONFLICT
|
||||
from sqlalchemy import URL
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker
|
||||
|
||||
|
||||
async def provide_transaction(
    db_session: AsyncSession,
) -> AsyncGenerator[AsyncSession, None]:
    """Yield ``db_session`` wrapped in a transaction.

    ``async with db_session.begin()`` commits when the handler finishes
    normally and rolls back on error.  Constraint violations surface as
    ``IntegrityError`` and are translated into an HTTP 409 for the client.
    """
    try:
        async with db_session.begin():
            yield db_session
    except IntegrityError as exc:
        # Map database constraint violations to a client-facing conflict.
        raise ClientException(
            status_code=HTTP_409_CONFLICT,
            detail=str(exc),
        ) from exc
|
||||
|
||||
|
||||
# App-wide session factory; expire_on_commit=False keeps ORM objects usable
# after commit (no implicit refresh on attribute access).
sessionmaker = async_sessionmaker(expire_on_commit=False)

# SECURITY NOTE(review): credentials, host and port are hard-coded; consider
# loading these from environment variables or app settings before deploying.
db_connection_string = URL.create(
    drivername="postgresql+asyncpg",
    username="televend",
    password="televend",
    host="localhost",
    port=5433,
    database="televend",
)
# Litestar SQLAlchemy plugin config built from the URL above.
db_config = SQLAlchemyAsyncConfig(
    connection_string=db_connection_string.render_as_string(hide_password=False)
)
|
||||
0
app/domain/__init__.py
Normal file
0
app/domain/__init__.py
Normal file
27
app/domain/machine.py
Normal file
27
app/domain/machine.py
Normal file
@ -0,0 +1,27 @@
|
||||
from typing import Annotated
|
||||
|
||||
from litestar.contrib.sqlalchemy.base import BigIntBase
|
||||
from litestar.contrib.sqlalchemy.dto import SQLAlchemyDTO
|
||||
from litestar.contrib.sqlalchemy.repository import SQLAlchemyAsyncRepository
|
||||
from litestar.dto import DTOConfig
|
||||
from sqlalchemy.orm import Mapped
|
||||
|
||||
from app.lib import service
|
||||
|
||||
|
||||
class Machine(BigIntBase):
    """ORM model for a row of the ``machines`` table (big-int primary key)."""

    __tablename__ = "machines"
    # Human-readable machine label; searched by the machines list endpoint.
    caption: Mapped[str]
|
||||
|
||||
|
||||
class Repository(SQLAlchemyAsyncRepository[Machine]):
    """Async SQLAlchemy repository scoped to ``Machine`` rows."""

    model_type = Machine
|
||||
|
||||
class Service(service.Service[Machine]):
    """Domain service exposing CRUD operations for ``Machine``."""

    repository_type = Repository
|
||||
|
||||
|
||||
# write_config = DTOConfig(exclude={"created_at", "updated_at", "nationality"})
write_config = DTOConfig()  # NOTE(review): currently excludes nothing
# DTO used to deserialize request bodies on create/update endpoints.
MachineWriteDTO = SQLAlchemyDTO[Annotated[Machine, write_config]]
# DTO used to serialize Machine instances in responses.
MachineReadDTO = SQLAlchemyDTO[Machine]
|
||||
0
app/dto/__init__.py
Normal file
0
app/dto/__init__.py
Normal file
0
app/lib/__init__.py
Normal file
0
app/lib/__init__.py
Normal file
150
app/lib/dependencies.py
Normal file
150
app/lib/dependencies.py
Normal file
@ -0,0 +1,150 @@
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from litestar.contrib.repository.filters import BeforeAfter, CollectionFilter, FilterTypes, LimitOffset
|
||||
from litestar.di import Provide
|
||||
from litestar.params import Dependency, Parameter
|
||||
|
||||
# Default page size applied when the client does not specify one.
DEFAULT_PAGINATION_LIMIT = 20


__all__ = [
    "create_collection_dependencies",
    "provide_created_filter",
    "provide_filter_dependencies",
    "provide_id_filter",
    "provide_limit_offset_pagination",
    "provide_updated_filter",
]


# Shorthand alias for optional datetime query parameters.
DTorNone = datetime | None

# Keys under which each filter dependency is registered on the application.
CREATED_FILTER_DEPENDENCY_KEY = "created_filter"
FILTERS_DEPENDENCY_KEY = "filters"
ID_FILTER_DEPENDENCY_KEY = "id_filter"
LIMIT_OFFSET_DEPENDENCY_KEY = "limit_offset"
UPDATED_FILTER_DEPENDENCY_KEY = "updated_filter"
|
||||
|
||||
|
||||
def provide_id_filter(
    ids: list[UUID] | None = Parameter(query="ids", default=None, required=False)
) -> CollectionFilter[UUID]:
    """Build the filter consumed by ``Repository.filter_in_collection()``.

    Args:
        ids: Identifiers parsed from the comma-separated ``ids`` query param.

    Returns:
        A ``CollectionFilter`` scoping queries to the given ``id`` values.
    """
    values = ids or []
    return CollectionFilter(field_name="id", values=values)
|
||||
|
||||
|
||||
def provide_created_filter(
    before: DTorNone = Parameter(query="created-before", default=None, required=False),
    after: DTorNone = Parameter(query="created-after", default=None, required=False),
) -> BeforeAfter:
    """Build the filter consumed by ``Repository.filter_on_datetime_field()``.

    Args:
        before: Keep records created before this date/time.
        after: Keep records created after this date/time.
    """
    return BeforeAfter(field_name="created_at", before=before, after=after)
|
||||
|
||||
|
||||
def provide_updated_filter(
    before: DTorNone = Parameter(query="updated-before", default=None, required=False),
    after: DTorNone = Parameter(query="updated-after", default=None, required=False),
) -> BeforeAfter:
    """Build the filter consumed by ``Repository.filter_on_datetime_field()``.

    Args:
        before: Keep records updated before this date/time.
        after: Keep records updated after this date/time.
    """
    return BeforeAfter(field_name="updated_at", before=before, after=after)
|
||||
|
||||
|
||||
def provide_limit_offset_pagination(
    page: int = Parameter(ge=1, default=1, required=False),
    page_size: int = Parameter(
        query="page-size",
        ge=1,
        default=DEFAULT_PAGINATION_LIMIT,
        required=False,
    ),
) -> LimitOffset:
    """Build the filter consumed by ``Repository.apply_limit_offset_pagination()``.

    Args:
        page: 1-based page number; translated into the OFFSET.
        page_size: Rows per page; used as the LIMIT.
    """
    offset = page_size * (page - 1)
    return LimitOffset(limit=page_size, offset=offset)
|
||||
|
||||
|
||||
def provide_filter_dependencies(
    created_filter: BeforeAfter = Dependency(skip_validation=True),
    updated_filter: BeforeAfter = Dependency(skip_validation=True),
    id_filter: CollectionFilter = Dependency(skip_validation=True),
    limit_offset: LimitOffset = Dependency(skip_validation=True),
) -> list[FilterTypes]:
    """Aggregate the individual filter dependencies into a single list.

    Inject this wherever a collection route needs every standard filter::

        @get
        def get_collection_handler(filters: list[FilterTypes]) -> ...:
            ...

    The dependency is provided at the application layer, so it only needs to
    be injected where necessary.

    Args:
        created_filter: Scopes the query to instance creation date/time.
        updated_filter: Scopes the query to instance update date/time.
        id_filter: Scopes the query to a limited set of identities.
        limit_offset: Applies LIMIT/OFFSET pagination.

    Returns:
        The filters parsed from the connection, ready to splat into
        repository/service calls.
    """
    filters: list[FilterTypes] = [
        created_filter,
        id_filter,
        limit_offset,
        updated_filter,
    ]
    return filters
|
||||
|
||||
|
||||
def create_collection_dependencies() -> dict[str, Provide]:
    """Map dependency keys to providers for pagination/collection endpoints.

    Returns:
        A dict suitable for registration on the application's
        ``dependencies`` mapping.
    """
    providers = (
        (LIMIT_OFFSET_DEPENDENCY_KEY, provide_limit_offset_pagination),
        (UPDATED_FILTER_DEPENDENCY_KEY, provide_updated_filter),
        (CREATED_FILTER_DEPENDENCY_KEY, provide_created_filter),
        (ID_FILTER_DEPENDENCY_KEY, provide_id_filter),
        (FILTERS_DEPENDENCY_KEY, provide_filter_dependencies),
    )
    return {key: Provide(fn, sync_to_thread=False) for key, fn in providers}
|
||||
68
app/lib/exceptions.py
Normal file
68
app/lib/exceptions.py
Normal file
@ -0,0 +1,68 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from litestar.contrib.repository.exceptions import (
|
||||
ConflictError as RepositoryConflictException,
|
||||
)
|
||||
from litestar.contrib.repository.exceptions import (
|
||||
NotFoundError as RepositoryNotFoundException,
|
||||
)
|
||||
from litestar.contrib.repository.exceptions import (
|
||||
RepositoryError as RepositoryException,
|
||||
)
|
||||
from litestar.exceptions import (
|
||||
HTTPException,
|
||||
InternalServerException,
|
||||
NotFoundException,
|
||||
)
|
||||
from litestar.middleware.exceptions.middleware import create_exception_response
|
||||
|
||||
from .service import ServiceError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from litestar.connection import Request
|
||||
from litestar.response import Response
|
||||
|
||||
__all__ = [
    "repository_exception_to_http_response",
    "service_exception_to_http_response",
]

# Module-level logger for this module's exception handlers.
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConflictException(HTTPException):
    """HTTP 409 raised when a repository conflict (e.g. duplicate key) occurs."""

    status_code = 409
|
||||
|
||||
|
||||
def repository_exception_to_http_response(request: "Request", exc: RepositoryException) -> "Response":
    """Translate a repository-layer exception into an HTTP error response.

    Args:
        request: The request that experienced the exception.
        exc: Exception raised during handling of the request.

    Returns:
        Exception response appropriate to the type of original exception.
    """
    if isinstance(exc, RepositoryNotFoundException):
        http_exc: type[HTTPException] = NotFoundException
    elif isinstance(exc, RepositoryConflictException):
        http_exc = ConflictException
    else:
        # Anything unexpected becomes a generic 500.
        http_exc = InternalServerException
    return create_exception_response(request, exc=http_exc())
|
||||
|
||||
|
||||
def service_exception_to_http_response(request: "Request", exc: ServiceError) -> "Response":
    """Translate a service-layer exception into an HTTP 500 response.

    Args:
        request: The request that experienced the exception.
        exc: Exception raised during handling of the request.

    Returns:
        An internal-server-error response; the original error is logged so
        the failure detail is not silently lost.
    """
    # The client only ever sees a generic 500 — keep the real cause in the
    # logs instead of discarding ``exc`` entirely.
    logger.error("service error while handling request", exc_info=exc)
    return create_exception_response(request, InternalServerException())
|
||||
95
app/lib/service.py
Normal file
95
app/lib/service.py
Normal file
@ -0,0 +1,95 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any, Generic
|
||||
|
||||
from litestar.contrib.sqlalchemy.repository import ModelT
|
||||
|
||||
__all__ = ["Service", "ServiceError"]
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from litestar.contrib.repository import AbstractAsyncRepository, FilterTypes
|
||||
|
||||
|
||||
class ServiceError(Exception):
    """Base class for `Service` related exceptions.

    Raise subclasses of this from service-layer code so the application's
    exception handlers can map them to HTTP responses.
    """
|
||||
|
||||
|
||||
class Service(Generic[ModelT]):
    """Generic async service that delegates all persistence to a repository."""

    def __init__(self, repository: AbstractAsyncRepository[ModelT]) -> None:
        """Generic Service object.

        Args:
            repository: Instance conforming to `AbstractRepository` interface.
        """
        self.repository = repository

    async def create(self, data: ModelT) -> ModelT:
        """Wraps repository instance creation.

        Args:
            data: Representation to be created.

        Returns:
            Representation of created instance.
        """
        return await self.repository.add(data)

    async def list(self, *filters: FilterTypes, **kwargs: Any) -> list[ModelT]:
        """Wraps repository scalars operation.

        Args:
            *filters: Collection route filters.
            **kwargs: Keyword arguments for attribute based filtering.

        Returns:
            The list of instances retrieved from the repository.
        """
        return await self.repository.list(*filters, **kwargs)

    async def update(self, id_: Any, data: ModelT) -> ModelT:
        """Wraps repository update operation.

        Args:
            id_: Identifier of item to be updated.
            data: Representation to be updated.

        Returns:
            Updated representation.
        """
        # NOTE(review): ``id_`` is not forwarded — the repository derives the
        # identity from ``data`` itself. Confirm callers set it on ``data``.
        return await self.repository.update(data)

    async def upsert(self, id_: Any, data: ModelT) -> ModelT:
        """Wraps repository upsert operation.

        Args:
            id_: Identifier of the object for upsert.
            data: Representation for upsert.

        Returns:
            Updated or created representation.
        """
        # NOTE(review): ``id_`` is likewise unused here — see ``update``.
        return await self.repository.upsert(data)

    async def get(self, id_: Any) -> ModelT:
        """Wraps repository scalar operation.

        Args:
            id_: Identifier of instance to be retrieved.

        Returns:
            Representation of instance with identifier `id_`.
        """
        return await self.repository.get(id_)

    async def delete(self, id_: Any) -> ModelT:
        """Wraps repository delete operation.

        Args:
            id_: Identifier of instance to be deleted.

        Returns:
            Representation of the deleted instance.
        """
        return await self.repository.delete(id_)
|
||||
Reference in New Issue
Block a user