diff --git a/src/config/constants/app.py b/src/config/constants/app.py index 3ae0a5a..357f13e 100644 --- a/src/config/constants/app.py +++ b/src/config/constants/app.py @@ -1,6 +1,11 @@ +from typing import Literal from uuid import uuid4 import datetime as dt +from beanie.odm.interfaces.find import FindType, DocumentProjectionType +from beanie.odm.queries.find import FindMany + + PROJECT_NAME = "backend_burger" S3_BUCKET_NAME = "backend-burger" S3_FOLDER_NAME = "logs" @@ -18,3 +23,11 @@ USER_CACHE_KEY = "users" SINGLE_USER_CACHE_DURATION = 60 * 60 USERS_CACHE_DURATION = 5 * 60 + +ITEMS_PER_PAGE = 100 +MAXIMUM_ITEMS_PER_PAGE = 500 + +SORT_OPERATIONS = Literal["asc", "desc"] +FIND_MANY_QUERY = FindMany[FindType] | FindMany[DocumentProjectionType] + +FILTER_OPERATIONS = Literal["=", "!=", ">", ">=", "<", "<=", "like"] diff --git a/src/main.py b/src/main.py index 63b8878..10c3dc3 100644 --- a/src/main.py +++ b/src/main.py @@ -14,7 +14,7 @@ ) from src.config.middleware import ExceptionHandlerMiddleware, LoggingMiddleware from src.config.services import setup_services -from src.routers import users, auth +from src.routers import poe, users, auth from src.schemas.responses import BaseResponse @@ -27,6 +27,7 @@ app.include_router(users.router) app.include_router(auth.router) +app.include_router(poe.router) app.add_exception_handler(status.HTTP_400_BAD_REQUEST, handle_invalid_input_exception) app.add_exception_handler(RequestValidationError, handle_validation_exception) diff --git a/src/models/poe.py b/src/models/poe.py index 0a21eac..05ba674 100644 --- a/src/models/poe.py +++ b/src/models/poe.py @@ -1,15 +1,7 @@ -from enum import Enum - from beanie import Link -from pydantic import Field, Json from src.models.common import DateMetadataDocument -from src.schemas.poe import ItemPrice - - -class ItemIdType(str, Enum): - pay = "pay" - receive = "receive" +from src.schemas.poe import ItemBase class ItemCategory(DateMetadataDocument): @@ -26,19 +18,11 @@ class Settings: name = 
"poe_item_categories" -class Item(DateMetadataDocument): +class Item(ItemBase, DateMetadataDocument): """Item represents a Path of Exile in-game item. Each item belongs to a category. It contains information such as item type and the current, past and predicted pricing, encapsulated in the `ItemPrice` schema.""" - poe_ninja_id: int - id_type: ItemIdType | None = None - name: str category: Link[ItemCategory] - price: Json[ItemPrice] | None = None - type_: str | None = Field(None, serialization_alias="type") - variant: str | None = None - icon_url: str | None = None - enabled: bool = True class Settings: """Defines the settings for the collection.""" diff --git a/src/routers/poe.py b/src/routers/poe.py new file mode 100644 index 0000000..5562f75 --- /dev/null +++ b/src/routers/poe.py @@ -0,0 +1,29 @@ +from fastapi import APIRouter, Depends, Query + +from src import dependencies as deps +from src.schemas.web_responses import users as resp +from src.schemas.responses import AppResponse, BaseResponse +from src.services import poe as service + + +dependencies = [Depends(deps.check_access_token)] +router = APIRouter(prefix="/poe", tags=["Path of Exile"], dependencies=dependencies) + + +# TODO: add responses +@router.get("/categories", responses=resp.GET_USERS_RESPONSES) +async def get_all_categories(): + """Gets a list of all item categories from the database, mapped by their group names.""" + + item_categories = await service.get_item_categories() + item_category_mapping = service.group_item_categories(item_categories) + + return AppResponse(BaseResponse(data=item_category_mapping)) + + +@router.get("/items", responses=resp.GET_USERS_RESPONSES) +async def get_items_by_group(category_group: str = Query(..., min_length=3, max_length=50)): + """Gets a list of all items belonging to the given category group.""" + + items = await service.get_items_by_group(category_group) + return AppResponse(BaseResponse(data=items)) diff --git a/src/schemas/poe.py b/src/schemas/poe.py index 
eff7111..5090d86 100644 --- a/src/schemas/poe.py +++ b/src/schemas/poe.py @@ -2,7 +2,7 @@ from decimal import Decimal from enum import Enum -from pydantic import BaseModel +from pydantic import BaseModel, Field, Json class Currency(str, Enum): @@ -10,6 +10,11 @@ class Currency(str, Enum): divines = "divines" +class ItemIdType(str, Enum): + pay = "pay" + receive = "receive" + + class ItemPrice(BaseModel): """ItemPrice holds information regarding the current, past and future price of an item. It stores the recent and predicted prices in a dictionary, with the date as the key.""" @@ -20,3 +25,24 @@ class ItemPrice(BaseModel): price_history_currency: Currency price_prediction: dict[dt.datetime, Decimal] price_prediction_currency: Currency + + +class ItemCategoryResponse(BaseModel): + """ItemCategoryResponse holds the requisite subset of ItemCategory's data for API responses.""" + + name: str + internal_name: str + group: str + + +class ItemBase(BaseModel): + """ItemBase encapsulates core fields of the Item document.""" + + poe_ninja_id: int + id_type: ItemIdType | None = None + name: str + price: Json[ItemPrice] | None = None + type_: str | None = Field(None, serialization_alias="type") + variant: str | None = None + icon_url: str | None = None + enabled: bool = True diff --git a/src/schemas/requests.py b/src/schemas/requests.py new file mode 100644 index 0000000..06ab153 --- /dev/null +++ b/src/schemas/requests.py @@ -0,0 +1,45 @@ +from typing import Annotated +from fastapi import Body, Query +from pydantic import BaseModel, BeforeValidator, Field, computed_field + +from src.config.constants.app import FILTER_OPERATIONS, ITEMS_PER_PAGE, MAXIMUM_ITEMS_PER_PAGE, SORT_OPERATIONS + + +lowercase_validator = BeforeValidator(lambda v: v.lower()) + + +class PaginationInput(BaseModel): + """PaginationInput encapsulates query parameters required for a paginated response.""" + + page: int = Query(1, gt=0) + per_page: int = Query(ITEMS_PER_PAGE, gt=0, le=MAXIMUM_ITEMS_PER_PAGE) 
+ + @computed_field + @property + def offset(self) -> int: + """Calculates the offset value for use in database queries.""" + + return (self.page - 1) * self.per_page + + +class SortSchema(BaseModel): + """SortSchema encapsulates the sorting schema model, requiring the field to sort on, and the sort operation type.""" + + field: Annotated[str, lowercase_validator] + operation: Annotated[SORT_OPERATIONS, lowercase_validator] + + +class FilterSchema(BaseModel): + """FilterSchema encapsulates the filter schema model, requiring a field, a valid operation and the value to filter on the field by.""" + + field: Annotated[str, lowercase_validator] + operation: Annotated[FILTER_OPERATIONS, lowercase_validator] + value: str = Field(min_length=1, max_length=200) + + +class FilterSortInput(BaseModel): + """FilterSortInput wraps filter and sort schema implementations, enabling them to be embedded as JSON body + parameters for FastAPI request handler functions.""" + + filter_: list[FilterSchema] | None = Field(Body(None, embed=True), alias="filter") + sort: list[SortSchema] | None = Field(Body(None, embed=True)) diff --git a/src/schemas/responses.py b/src/schemas/responses.py index 31746bd..f205828 100644 --- a/src/schemas/responses.py +++ b/src/schemas/responses.py @@ -4,6 +4,8 @@ from starlette import status from pydantic import BaseModel, Field, root_validator +from src.config.constants.app import MAXIMUM_ITEMS_PER_PAGE + # T represents any Pydantic BaseModel or Beanie Document, dict or list of BaseModel/Document or dict return types # TODO: define apt type constraints, currently failing with BaseModel constraint @@ -53,7 +55,7 @@ def __init__( """Dumps Pydantic models or keeps content as is, passing it to the parent `__init__` function.""" if isinstance(content, BaseResponse): - data = content.model_dump() + data = content.model_dump(mode="json") else: data = content @@ -66,3 +68,12 @@ def render(self, content: Any) -> bytes: return content return super().render(content) + + 
+class PaginationResponse(BaseModel): + """PaginationResponse encapsulates pagination values required by the client.""" + + page: int = Field(gt=0) + per_page: int = Field(gt=0, le=MAXIMUM_ITEMS_PER_PAGE) + total_items: int + total_pages: int diff --git a/src/services/poe.py b/src/services/poe.py new file mode 100644 index 0000000..66aac00 --- /dev/null +++ b/src/services/poe.py @@ -0,0 +1,61 @@ +from collections import defaultdict + +from fastapi import HTTPException +from loguru import logger +from starlette import status + +from src.models.poe import Item, ItemCategory +from src.schemas.poe import ItemBase, ItemCategoryResponse + + +async def get_item_categories() -> list[ItemCategoryResponse]: + """Gets all item category documents from the database, extracting only the required fields from the documents.""" + + try: + item_categories = await ItemCategory.find_all().project(ItemCategoryResponse).to_list() + except Exception as exc: + logger.error(f"error getting item categories: {exc}") + raise + + return item_categories + + +def group_item_categories(item_categories: list[ItemCategoryResponse]) -> dict[str, list[ItemCategoryResponse]]: + """Groups item category documents by their category group.""" + + item_category_mapping = defaultdict(list) + + for category in item_categories: + item_category_mapping[category.group].append(category) + + return item_category_mapping + + +async def get_items_by_group(category_group: str) -> list[ItemBase]: + """ + Gets items by given category group. Raises a 400 error if category group is invalid. 
+ """ + + try: + item_category = await ItemCategory.find_one(ItemCategory.group == category_group) + except Exception as exc: + logger.error(f"error getting item category by group '{category_group}': {exc} ") + raise + + if item_category is None: + raise HTTPException(status.HTTP_400_BAD_REQUEST, "Invalid category group.") + + try: + items = ( + await Item.find( + Item.category.group == category_group, # type: ignore + fetch_links=True, + ) + .project(ItemBase) + .to_list() + ) + except Exception as exc: + logger.error(f"error getting item by category group '{category_group}': {exc}") + raise + + return items diff --git a/src/utils/routers.py b/src/utils/routers.py index 8d56add..e8e6927 100644 --- a/src/utils/routers.py +++ b/src/utils/routers.py @@ -1,9 +1,11 @@ -from typing import Awaitable +import math +from typing import Any, Awaitable from loguru import logger from redis.asyncio import Redis -from src.schemas.responses import BaseResponse +from src.schemas.requests import PaginationInput +from src.schemas.responses import BaseResponse, PaginationResponse from src.utils.services import cache_data, get_cached_data, serialize_response @@ -41,3 +43,18 @@ async def get_or_cache_serialized_entity( logger.debug(f"cached '{redis_key}' data") return serialized_entity + + +def create_pagination_response(data: Any, total_items: int, pagination: PaginationInput, data_key: str) -> BaseResponse: + """Creates a response structure wrapped in a `BaseResponse`, calculating and assigning pagination values.""" + + per_page = pagination.per_page + total_pages = math.ceil(total_items / per_page) + + pagination_response = PaginationResponse( + page=pagination.page, per_page=pagination.per_page, total_items=total_items, total_pages=total_pages + ) + response_data = {data_key: data, "pagination": pagination_response} + + response = BaseResponse(data=response_data) + return response diff --git a/src/utils/services.py b/src/utils/services.py index 5a87c17..1358a4c 100644 --- 
a/src/utils/services.py +++ b/src/utils/services.py @@ -1,7 +1,17 @@ +import copy +import operator +from typing import Self, Type + +from beanie import Document +from beanie.odm.operators.find.evaluation import RegEx as RegExOperator +from fastapi import Body from loguru import logger import orjson +import pymongo from redis.asyncio import Redis, RedisError +from src.config.constants.app import FIND_MANY_QUERY +from src.schemas.requests import FilterSchema, PaginationInput, SortSchema from src.schemas.responses import E, T, BaseResponse @@ -47,3 +57,88 @@ async def delete_cached_data(key: str, redis_client: Redis) -> None: except RedisError as exc: logger.error(f"error deleting cached data: {exc}") raise + + +def sort_on_query(query: FIND_MANY_QUERY, model: Type[Document], sort: list[SortSchema] | None) -> FIND_MANY_QUERY: + """Parses, gathers and chains sort operations on the input query. Skips the process if sort input is empty.""" + + if not isinstance(sort, list): + return query + + sort_expressions = [] + + for entry in sort: + field = entry.field + operation = pymongo.ASCENDING if entry.operation == "asc" else pymongo.DESCENDING + + model_field = getattr(model, field) + expression = (model_field, operation) + + sort_expressions.append(expression) + + query = query.sort(sort_expressions) + return query + + +def filter_on_query( + query: FIND_MANY_QUERY, model: Type[Document], filter_: list[FilterSchema] | None +) -> FIND_MANY_QUERY: + """Parses, gathers and chains filter operations on the input query. 
Skips the process if filter input is empty.\n + Maps the operation list to operator arguments that allow using the operator dynamically, to create expressions + within the Beanie `find` method.""" + + if not isinstance(filter_, list): + return query + + operation_map = { + "=": operator.eq, + "!=": operator.ne, + ">": operator.gt, + "<": operator.lt, + ">=": operator.ge, + "<=": operator.le, + "like": RegExOperator, + } + + for entry in filter_: + field = entry.field + operation = entry.operation + operation_function = operation_map[operation] + value = entry.value + + model_field = getattr(model, field) + + if operation != "like": + query = query.find(operation_function(model_field, value)) + else: + operation_function = RegExOperator + options = "i" # case-insensitive search + + query = query.find(operation_function(model_field, value, options=options)) + + return query + + +class QueryChainer: + def __init__(self, initial_query: FIND_MANY_QUERY, model: Type[Document]) -> None: + self._query = initial_query + self.model = model + + def sort(self, sort: list[SortSchema] | None) -> Self: + self._query = sort_on_query(self._query, self.model, sort) + return self + + def filter(self, filter_: list[FilterSchema] | None) -> Self: + self._query = filter_on_query(self._query, self.model, filter_) + return self + + def paginate(self, pagination: PaginationInput) -> Self: + self._query = self._query.find(skip=pagination.offset, limit=pagination.per_page) + return self + + @property + def query(self) -> FIND_MANY_QUERY: + return self._query + + def clone(self) -> Self: + return copy.deepcopy(self) # type: ignore