Skip to content

Commit

Permalink
Adds typing to the decorators
Browse files Browse the repository at this point in the history
  • Loading branch information
fabiob committed Apr 4, 2024
1 parent 63fb637 commit 70795f7
Show file tree
Hide file tree
Showing 4 changed files with 195 additions and 4 deletions.
2 changes: 2 additions & 0 deletions aiocache/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ class cached:
def __init__(
self,
ttl=SENTINEL,
*,
namespace="",
key_builder=None,
skip_cache_func=lambda x: False,
Expand Down Expand Up @@ -303,6 +304,7 @@ class multi_cached:
def __init__(
self,
keys_from_attr,
*,
namespace="",
key_builder=None,
skip_cache_func=lambda k, v: False,
Expand Down
185 changes: 185 additions & 0 deletions aiocache/decorators.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,185 @@
from typing import (
    Any,
    Callable,
    Concatenate,
    Generic,
    Mapping,
    ParamSpec,
    Protocol,
    Sequence,
    Type,
    TypeVar,
    overload,
)

from aiocache import BaseCache, Cache
from aiocache.plugins import BasePlugin
from aiocache.serializers import BaseSerializer

Params = ParamSpec("Params")
ReturnType = TypeVar("ReturnType")
DecoratorKWArgs = TypeVar("DecoratorKWArgs")
SerializerType = TypeVar("SerializerType", bound=BaseSerializer)
CacheType = TypeVar("CacheType", bound=BaseCache)
MCReturnType = TypeVar("MCReturnType", bound=Mapping)
MCKey = TypeVar("MCKey")
MCVal = TypeVar("MCVal")

class CachedDecorator(Protocol[Params, ReturnType]):
def __call__(
self,
*args: Params.args,
cache_read: bool = True,
cache_write: bool = True,
aiocache_wait_for_write: bool = True,
**kwargs: Params.kwargs,
) -> ReturnType: ...

class CachedDecorated(CachedDecorator[CacheType, Params, ReturnType]):
cache: CacheType

class cached:
ttl: int | None
key_builder: Callable[Params, str] | None
skip_cache_func: Callable[[ReturnType], bool] | None
noself: bool
alias: str | None
cache: None

decorator: CachedDecorator[Params, ReturnType]

_cache: CacheType
_serializer: SerializerType
_namespace: str | None
_plugins: Sequence[BasePlugin] | None
_kwargs: dict[str, DecoratorKWArgs]

@overload
def __init__(
self,
ttl: int | None = None,
*,
key_builder: Callable[Params, str] | None = None,
skip_cache_func: Callable[[ReturnType], bool] | None = None,
cache: Type[CacheType] = Cache.MEMORY,
noself: bool = False,
alias: str,
**kwargs: DecoratorKWArgs,
): ...
@overload
def __init__(
self,
ttl: int | None = None,
*,
key_builder: Callable[Params, str] | None = None,
skip_cache_func: Callable[[ReturnType], bool] | None = None,
cache: Type[CacheType] = Cache.MEMORY,
noself: bool = False,
namespace: str | None = None,
serializer: SerializerType | None = None,
plugins: Sequence[BasePlugin] | None = None,
alias: None = None,
**kwargs: DecoratorKWArgs,
): ...
def __call__(
self, fn: Callable[Params, ReturnType]
) -> CachedDecorated[CacheType, Params, ReturnType]: ...
def get_cache_key(self, *args: Params.args, **kwargs: Params.kwargs) -> str: ...
async def get_from_cache(self, key: str) -> ReturnType | None: ...
async def set_in_cache(self, key: str, value: ReturnType) -> None: ...

class cached_stampede(cached):
lease: int

@overload
def __init__(
self,
lease: int = 2,
ttl: int | None = None,
*,
key_builder: Callable[Params, str] | None = None,
skip_cache_func: Callable[[ReturnType], bool] | None = None,
cache: Type[CacheType] = Cache.MEMORY,
noself: bool = False,
alias: str,
**kwargs: DecoratorKWArgs,
) -> CachedDecorated[CacheType, Params, ReturnType]: ...
@overload
def __init__(
self,
lease: int = 2,
ttl: int | None = None,
*,
key_builder: Callable[Params, str] | None = None,
skip_cache_func: Callable[[ReturnType], bool] | None = None,
cache: Type[CacheType] = Cache.MEMORY,
noself: bool = False,
namespace: str | None = None,
serializer: SerializerType | None = None,
plugins: Sequence[BasePlugin] | None = None,
alias: None = None,
**kwargs: DecoratorKWArgs,
) -> CachedDecorated[CacheType, Params, ReturnType]: ...

class multi_cached:
keys_from_attr: str
key_builder: Callable[Concatenate[MCKey, Callable[Params, MCReturnType], Params], str] | None
skip_cache_func: Callable[[MCKey, MCVal], bool] | None
ttl: int | None
alias: str | None
cache: None

decorator: CachedDecorator[Params, MCReturnType]

_cache: CacheType
_serializer: SerializerType
_namespace: str | None
_plugins: Sequence[BasePlugin] | None
_kwargs: dict[str, DecoratorKWArgs]

@overload
def __init__(
self,
keys_from_attr: str,
*,
key_builder: (
Callable[Concatenate[MCKey, Callable[Params, ReturnType], Params], str] | None
) = None,
skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
ttl: int | None = None,
cache: Type[CacheType] = Cache.MEMORY,
alias: str,
**kwargs: DecoratorKWArgs,
): ...
@overload
def __init__(
self,
keys_from_attr: str,
*,
namespace: str | None = None,
key_builder: (
Callable[Concatenate[MCKey, Callable[Params, ReturnType], Params], str] | None
) = None,
skip_cache_func: Callable[[MCKey, MCVal], bool] | None = None,
ttl: int | None = None,
cache: Type[CacheType] = Cache.MEMORY,
serializer: SerializerType | None = None,
plugins: Sequence[BasePlugin] | None = None,
alias: None = None,
**kwargs: DecoratorKWArgs,
): ...
def __call__(
self, fn: Callable[Params, ReturnType]
) -> CachedDecorated[CacheType, Params, MCReturnType]: ...
def get_cache_keys(
self, f: Callable[Params, ReturnType], *args: Params.args, **kwargs: Params.kwargs
) -> str: ...
async def get_from_cache(self, *keys: MCKey) -> list[MCVal | None]: ...
async def set_in_cache(
self,
result: MCReturnType[MCKey, MCVal],
fn: Callable[Params, ReturnType],
fn_args: Params.args,
fn_kwargs: Params.kwargs,
) -> None: ...

def __getattr__(name: str) -> Any: ...
3 changes: 3 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,6 @@ source = aiocache
[coverage:report]
show_missing = true
skip_covered = true

[options.package_data]
aiocache = py.typed, *.pyi
9 changes: 5 additions & 4 deletions tests/ut/test_decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def test_init(self):
def test_fails_at_instantiation(self):
with pytest.raises(TypeError):

@cached(wrong_param=1)
@cached(wrong_param=1) # type: ignore[misc]
async def fn() -> None:
"""Dummy function."""

Expand Down Expand Up @@ -373,7 +373,7 @@ def f():
def test_fails_at_instantiation(self):
with pytest.raises(TypeError):

@multi_cached(wrong_param=1)
@multi_cached(wrong_param=1) # type: ignore[misc]
async def fn() -> None:
"""Dummy function."""

Expand Down Expand Up @@ -476,8 +476,9 @@ async def test_cache_write_doesnt_wait_for_future(self, mocker, decorator, decor
mocker.spy(decorator, "set_in_cache")
with patch.object(decorator, "get_from_cache", autospec=True, return_value=[None, None]):
with patch("aiocache.decorators.asyncio.ensure_future", autospec=True):
await decorator_call(1, keys=["a", "b"], value="value",
aiocache_wait_for_write=False)
await decorator_call(
1, keys=["a", "b"], value="value", aiocache_wait_for_write=False
)

decorator.set_in_cache.assert_not_awaited()
decorator.set_in_cache.assert_called_once_with({"a": ANY, "b": ANY}, stub_dict, ANY, ANY)
Expand Down

0 comments on commit 70795f7

Please sign in to comment.