
Commit

v0.0.1
howie6879 committed Mar 6, 2018
0 parents commit 409baf9
Showing 14 changed files with 542 additions and 0 deletions.
5 changes: 5 additions & 0 deletions README.md
@@ -0,0 +1,5 @@
# Expire

![PyPI](https://img.shields.io/pypi/v/expire.svg)

Expire aims to make using a cache as convenient as possible.
43 changes: 43 additions & 0 deletions examples/cache_demo.py
@@ -0,0 +1,43 @@
#!/usr/bin/env python
"""
Tips for human caches: there are several simple caching schemes we provide for you, and you can use the following ways.
- Use RedisCache,MemcachedCache,MemoryCache class directly
- Use cached decorator
"""
from expire import Settings
from expire import RedisCache, cached, CacheSetting


class MySettings(Settings):
    """
    Create custom configuration
    """
    cache = {
        'cache_class': RedisCache,
        'cache_config': {
            'host': '127.0.0.1',
            'port': 6379,
            'db': 0,
            'password': None
        },
        'serializer': None
    }


@cached(**MySettings.cache, ttl=1000)
def parse(url, params=None, **kwargs):
    return "{0}: {1}".format(url, 'hello')


def cached_by_redis(key):
    cache_ins = CacheSetting(MySettings)
    return cache_ins.get(key)


if __name__ == '__main__':
    key = 'expire'
    # Set
    result = parse(url=key, dynamic_key=key)
    print(result)
    # Get
    print(cached_by_redis(key))
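
The docstring above also lists using the cache classes directly; a minimal, hedged sketch of that path with the in-process MemoryCache (so it runs without a Redis server, and is not part of this commit):

# Hedged sketch: using a cache class directly instead of the cached decorator.
from expire import MemoryCache

cache = MemoryCache()
cache.set('expire', 'hello', ttl=1000)   # stored with a 1000-second expiry
print(cache.get('expire'))               # 'hello'
print(cache.exists('expire'))            # True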
16 changes: 16 additions & 0 deletions expire/__init__.py
@@ -0,0 +1,16 @@
from .cache_setting import CacheSetting
from .decorator import cached
from .memcached_cache import MemcachedCache
from .memory_cache import MemoryCache
from .redis_cache import RedisCache
from .serializer import JsonSerializer, PickleSerializer, StrSerializer


class Settings:
    """Global Settings"""
    cache = {
        'cache_class': MemoryCache,
        'cache_config': {},
        'serializer': None,
        'ttl': None
    }
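
The Settings class above only carries a default cache dict; a hedged sketch (not part of this commit) of overriding it to use the exported JsonSerializer with CacheSetting. The JsonSettings name is made up, and it assumes JsonSerializer wraps json.dumps/loads, which is not shown on this page:

# Hypothetical example: a Settings subclass that swaps the default
# PickleSerializer for the exported JsonSerializer.
from expire import CacheSetting, JsonSerializer, MemoryCache, Settings


class JsonSettings(Settings):
    cache = {
        'cache_class': MemoryCache,
        'cache_config': {},
        'serializer': JsonSerializer,  # passed through to the cache class
        'ttl': 60,
    }


cache = CacheSetting(JsonSettings)
cache.set('user', {'name': 'howie'})   # stored as JSON for 60 seconds
print(cache.get('user'))               # {'name': 'howie'}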
61 changes: 61 additions & 0 deletions expire/base_cache.py
@@ -0,0 +1,61 @@
#!/usr/bin/env python
import abc


class BaseCache(metaclass=abc.ABCMeta):
    """
    Defines the interface that RedisCache, MemoryCache, and MemcachedCache must provide.
    """

    def __init__(self, serializer, namespace=None, **kwargs):
        self.namespace = namespace
        self.serializer = serializer()

    @abc.abstractmethod
    def set(self, key, value, ttl=None, **kwargs):
        """
        Set the value at key ``key`` to ``value``
        """
        pass

    @abc.abstractmethod
    def get(self, key, default=None, **kwargs):
        """
        Return the value at key ``key``, or ``default`` if the key doesn't exist
        """
        pass

    @abc.abstractmethod
    def delete(self, *keys, **kwargs):
        """
        Delete one or more keys specified by ``keys``
        """
        pass

    @abc.abstractmethod
    def exists(self, key, **kwargs):
        """
        Return a boolean indicating whether key ``key`` exists
        """
        pass

    @abc.abstractmethod
    def incr(self, key, **kwargs):
        """
        Increment the value of ``key``.
        """
        pass


class BaseSerializer(metaclass=abc.ABCMeta):
    """
    Defines the interface that JsonSerializer and PickleSerializer must provide.
    """

    @abc.abstractmethod
    def dumps(self, value, **kwargs):
        pass

    @abc.abstractmethod
    def loads(self, value, **kwargs):
        pass
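
These two abstract classes are the extension points for new backends. Below is a minimal, hedged sketch of a dict-backed cache that satisfies the BaseCache interface; it is not part of this commit, and the DictCache/_store names are made up for illustration:

# Hypothetical example: a toy backend implementing the BaseCache interface above.
from expire.base_cache import BaseCache
from expire.serializer import PickleSerializer


class DictCache(BaseCache):
    def __init__(self, serializer=None, **kwargs):
        super().__init__(serializer=serializer or PickleSerializer, **kwargs)
        self._store = {}

    def set(self, key, value, ttl=None, **kwargs):
        # ttl is accepted but ignored in this toy backend
        self._store[key] = self.serializer.dumps(value)
        return True

    def get(self, key, default=None, **kwargs):
        raw = self._store.get(key)
        return self.serializer.loads(raw) if raw is not None else default

    def delete(self, *keys, **kwargs):
        return [self._store.pop(key, None) for key in keys]

    def exists(self, key, **kwargs):
        return key in self._store

    def incr(self, key, **kwargs):
        value = int(self.get(key, default=0)) + 1
        self.set(key, value)
        return value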
41 changes: 41 additions & 0 deletions expire/cache_setting.py
@@ -0,0 +1,41 @@
#!/usr/bin/env python


from expire.memory_cache import MemoryCache



class CacheSetting:
    """
    Cache setting configuration.
    cache_dict provides the default configuration of the cache:
    - cache_class: such as MemoryCache, RedisCache, etc.
    - cache_config: backend configuration, e.g. Redis host, port, db, and password
    - serializer: such as JsonSerializer or PickleSerializer
    """
    cache_dict = {
        'cache_class': MemoryCache,
        'cache_config': {},
        'serializer': None
    }

    def __init__(self, settings=None):
        self.cache = getattr(settings, 'cache', self.cache_dict)
        if not isinstance(self.cache.get('cache_config'), dict):
            raise ValueError("Key cache_config must be a dict")
        serializer = self.cache.get('serializer')
        self.ttl = self.cache.get('ttl')
        self.instance = self.cache['cache_class'](serializer=serializer, **self.cache['cache_config'])

    def set(self, key, value, ttl=None, **kwargs):
        ttl = ttl or self.ttl
        return self.instance.set(key, value, ttl=ttl, **kwargs)

    def get(self, key, default=None, **kwargs):
        return self.instance.get(key, default=default, **kwargs)

    def exists(self, key, **kwargs):
        return self.instance.exists(key, **kwargs)

    def incr(self, key, **kwargs):
        return self.instance.incr(key, **kwargs)
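
A hedged usage sketch (not taken from this commit) of CacheSetting with no settings object, so it falls back to cache_dict, i.e. MemoryCache with the default serializer:

# Hedged sketch: CacheSetting falling back to its built-in cache_dict defaults.
from expire import CacheSetting

cache = CacheSetting()             # no Settings object -> cache_dict defaults
cache.set('greeting', 'hello')     # ttl is None here, so the entry never expires
print(cache.get('greeting'))       # 'hello'
print(cache.exists('greeting'))    # True
print(cache.incr('counter'))       # 1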
57 changes: 57 additions & 0 deletions expire/decorator.py
@@ -0,0 +1,57 @@
#!/usr/bin/env python
from functools import wraps

from colorama import Fore

from expire.log import logger


def dec_connector(func):
    """Lazily create the backend connection before running the wrapped call."""
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self._cache_conn is None:
            self._cache_conn = self._connector()
        return func(self, *args, **kwargs)

    return wrapper


def cached(cache_class=None, key=None, ttl=None, serializer=None, cache_config=None, **kwargs):
    """
    This decorator provides a caching mechanism for the returned data
    :param cache_class: such as RedisCache, MemcachedCache, or MemoryCache
    :param key: static cache key; if omitted, a dynamic_key keyword argument on the call is used
    :param ttl: seconds to store the data
    :param serializer: serializer class used for the stored value
    :param cache_config: backend configuration passed to cache_class
    :param kwargs: extra keyword arguments forwarded to cache_class
    :return: the decorated function
    """

    def cached_dec(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            cache_key = key or kwargs.pop('dynamic_key', None)
            if isinstance(cache_config, dict):
                kwargs.update(cache_config)
            cache_ins = cache_class(serializer=serializer, **kwargs)
            try:
                if cache_ins.exists(cache_key):
                    logger.info(Fore.YELLOW, 'Cache', 'Get<%s>' % cache_key)
                    return cache_ins.get(cache_key, **kwargs)
            except Exception:
                logger.exception('Cache', 'Get<%s>' % cache_key)
            result = func(*args, **kwargs)
            if result and cache_key:
                try:
                    if cache_ins.set(cache_key, result, ttl=ttl, **kwargs):
                        logger.info(Fore.YELLOW, 'Cache', 'Set<%s>' % cache_key)
                except Exception:
                    logger.exception('Cache', 'Set<%s>' % cache_key)

            return result

        return wrapper

    return cached_dec
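
The example file earlier uses a dynamic_key with RedisCache; a hedged sketch (not in this commit) of the other path the docstring describes, a fixed key backed by the in-process MemoryCache:

# Hedged sketch: @cached with a static key and MemoryCache.
from expire import MemoryCache, cached


@cached(cache_class=MemoryCache, key='slow_result', ttl=60)
def slow_computation():
    return sum(range(1_000_000))


print(slow_computation())  # computed once, then stored under 'slow_result'
print(slow_computation())  # second call is served from the cache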
33 changes: 33 additions & 0 deletions expire/log.py
@@ -0,0 +1,33 @@
#!/usr/bin/env python

import logging

import colorama
from colorama import Fore, Style

colorama.init(autoreset=True)


class Logger:
    """
    Taken from https://github.com/gaojiuli/toapi/blob/master/toapi/log.py
    """

    def __init__(self, name, level=logging.DEBUG):
        logging.basicConfig(format='%(asctime)s %(message)-10s ',
                            datefmt='%Y/%m/%d %H:%M:%S')

        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)

    def info(self, color, type, message):
        self.logger.info(color + '[%-8s] %-2s %s' % (type, 'OK', message) + Style.RESET_ALL)

    def error(self, type, message):
        self.logger.error(Fore.RED + '[%-8s] %-4s %s' % (type, 'FAIL', message) + Style.RESET_ALL)

    def exception(self, type, message):
        self.error(type, message)


logger = Logger(__name__)
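
Note that info takes a colorama color before the usual type/message pair, which is how decorator.py calls it; a small, hedged sketch of those calls:

# Hedged sketch of the three-argument logging calls used in decorator.py.
from colorama import Fore

from expire.log import logger

logger.info(Fore.GREEN, 'Cache', 'Get<expire>')   # green "[Cache   ] OK Get<expire>"
logger.error('Cache', 'Set<expire>')              # red "[Cache   ] FAIL Set<expire>"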
61 changes: 61 additions & 0 deletions expire/memcached_cache.py
@@ -0,0 +1,61 @@
#!/usr/bin/env python
from pymemcache.client.base import Client

from expire.base_cache import BaseCache
from expire.decorator import dec_connector
from expire.serializer import PickleSerializer


class MemcachedCache(BaseCache):
    _cache_conn = None

    def __init__(self, host="127.0.0.1", port=11211, connect_timeout=20, timeout=15, serializer=None,
                 **kwargs):
        if serializer is None:
            serializer = PickleSerializer
        super().__init__(serializer=serializer, **kwargs)
        self.host = host
        self.port = port
        self.connect_timeout = connect_timeout
        self.timeout = timeout
        self.kwargs = kwargs

    @dec_connector
    def set(self, key, value, ttl=None, **kwargs):
        ttl = ttl or 0
        result = self._cache_conn.set(key, self.serializer.dumps(value), expire=ttl)
        return result

    @dec_connector
    def get(self, key, default=None, **kwargs):
        result = self._cache_conn.get(key)
        if result and not isinstance(self.serializer, PickleSerializer):
            if isinstance(result, bytes):
                result = bytes.decode(result)
        return self.serializer.loads(result) if result is not None else default

    @dec_connector
    def delete(self, key, **kwargs):
        result = self._cache_conn.delete(key)
        return result

    @dec_connector
    def exists(self, key, **kwargs):
        result = self._cache_conn.get(key)
        return result is not None

    @dec_connector
    def incr(self, key, value=1, **kwargs):
        if not self.exists(key):
            result = value
            self.set(key, result)
        else:
            result = self._cache_conn.incr(key, value=value)
        return result

    def _cache_client(self):
        # Pass the stored timeouts and extra options as keyword arguments;
        # a bare positional self.kwargs would be misread by Client.
        client = Client((self.host, self.port), connect_timeout=self.connect_timeout,
                        timeout=self.timeout, **self.kwargs)
        return client

    def _connector(self):
        return self._cache_client()
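
A hedged usage sketch of MemcachedCache on its own; it assumes a memcached server listening on 127.0.0.1:11211 and is not part of this commit:

# Hedged sketch: direct MemcachedCache usage against a local memcached server.
from expire import MemcachedCache

mc = MemcachedCache(host='127.0.0.1', port=11211)
mc.set('greeting', 'hello', ttl=30)   # pickled and stored with a 30-second expiry
print(mc.get('greeting'))             # 'hello'
print(mc.exists('greeting'))          # True
mc.delete('greeting')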
50 changes: 50 additions & 0 deletions expire/memory_cache.py
@@ -0,0 +1,50 @@
#!/usr/bin/env python
import time

from expire.serializer import PickleSerializer
from expire.base_cache import BaseCache


class MemoryCache(BaseCache):
    _cache = {}

    def __init__(self, serializer=None, **kwargs):
        if serializer is None:
            serializer = PickleSerializer
        super().__init__(serializer=serializer, **kwargs)

    def set(self, key, value, ttl=None, **kwargs):
        if ttl:
            ttl = int(time.time()) + int(ttl)
        value = self.serializer.dumps((value, ttl))
        self._cache[key] = value
        return True

    def get(self, key, default=None, **kwargs):
        set_default = self.serializer.dumps((default, None))
        result = self._cache.get(key, set_default)
        result_list = self.serializer.loads(result)
        value = result_list[0]
        if result_list[1] is not None:
            ts = int(time.time())
            if ts > result_list[1]:
                self._delete(key=key)
                value = None
        return value if value is not None else default

    def delete(self, *keys):
        res = []
        for key in keys:
            res.append(self._delete(key))
        return res

    def exists(self, key, **kwargs):
        return key in self._cache

    def incr(self, key, **kwargs):
        result = int(self.get(key, default=0)) + 1
        self.set(key, result)
        return result

    def _delete(self, key):
        return self._cache.pop(key, 0)
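
Expired entries are only dropped when they are read back, as the get method above shows; a small, hedged sketch of that behaviour (not in this commit):

# Hedged sketch of MemoryCache ttl handling: the expired entry is removed on read.
import time

from expire import MemoryCache

mem = MemoryCache()
mem.set('token', 'abc', ttl=1)   # expires roughly one second from now
print(mem.get('token'))          # 'abc'
time.sleep(2)
print(mem.get('token'))          # None -- past its ttl, the entry is deleted and the default is returned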
