"""`functools.lru_cache` compatible memoizing function decorators."""

import collections
import functools
import math
import random
import time

try:
    from threading import RLock
except ImportError:  # pragma: no cover
    from dummy_threading import RLock

from . import keys
from .lfu import LFUCache
from .lru import LRUCache
from .rr import RRCache
from .ttl import TTLCache

__all__ = ('lfu_cache', 'lru_cache', 'rr_cache', 'ttl_cache')

_CacheInfo = collections.namedtuple('CacheInfo', [
    'hits', 'misses', 'maxsize', 'currsize'
])


class _UnboundCache(dict):
    @property
    def maxsize(self):
        return None

    @property
    def currsize(self):
        return len(self)


class _UnboundTTLCache(TTLCache):
    def __init__(self, ttl, timer):
        TTLCache.__init__(self, math.inf, ttl, timer)

    @property
    def maxsize(self):
        return None


def _cache(cache, typed):
    """Return a decorator that memoizes a function's results in `cache`,
    tracking hits and misses under a lock."""
    maxsize = cache.maxsize

    def decorator(func):
        key = keys.typedkey if typed else keys.hashkey
        lock = RLock()
        stats = [0, 0]  # [hits, misses]

        def wrapper(*args, **kwargs):
            k = key(*args, **kwargs)
            with lock:
                try:
                    v = cache[k]
                    stats[0] += 1
                    return v
                except KeyError:
                    stats[1] += 1
            # Compute the value outside the lock so concurrent callers
            # are not blocked while `func` runs.
            v = func(*args, **kwargs)
            try:
                with lock:
                    cache[k] = v
            except ValueError:
                pass  # value too large
            return v

        def cache_info():
            with lock:
                hits, misses = stats
                maxsize = cache.maxsize
                currsize = cache.currsize
            return _CacheInfo(hits, misses, maxsize, currsize)

        def cache_clear():
            with lock:
                try:
                    cache.clear()
                finally:
                    stats[:] = [0, 0]

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        functools.update_wrapper(wrapper, func)
        return wrapper

    return decorator


def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        # bare decorator usage, e.g. `@lfu_cache` without arguments:
        # `maxsize` is the decorated function itself
        return _cache(LFUCache(128), typed)(maxsize)
    else:
        return _cache(LFUCache(maxsize), typed)
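
# Illustrative usage (not part of this module): memoize a hypothetical
# `get_pep` function, keeping the 32 most frequently used results.
#
#     @lfu_cache(maxsize=32)
#     def get_pep(num):
#         ...  # e.g. fetch and parse a document
#
#     get_pep(8); get_pep(8); get_pep(320)
#     get_pep.cache_info()  # CacheInfo(hits=1, misses=2, maxsize=32, currsize=2)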


def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(LRUCache(128), typed)(maxsize)
    else:
        return _cache(LRUCache(maxsize), typed)
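
# Illustrative usage (not part of this module): with `typed=True`, arguments
# of different types are cached separately, mirroring `functools.lru_cache`;
# `square` is a hypothetical example function.
#
#     @lru_cache(maxsize=None, typed=True)
#     def square(x):
#         return x * x
#
#     square(3)    # cached under the key for int 3
#     square(3.0)  # a separate cache entry for float 3.0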


def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(RRCache(128, choice), typed)(maxsize)
    else:
        return _cache(RRCache(maxsize, choice), typed)
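
# Illustrative usage (not part of this module): when the cache is full, a
# random entry selected by `choice` is evicted; passing the `choice` method
# of a seeded `random.Random` makes eviction reproducible for testing.
#
#     @rr_cache(maxsize=2, choice=random.Random(0).choice)
#     def lookup(key):
#         ...  # e.g. an expensive lookup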


def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.
    """
    if maxsize is None:
        return _cache(_UnboundTTLCache(ttl, timer), typed)
    elif callable(maxsize):
        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
    else:
        return _cache(TTLCache(maxsize, ttl, timer), typed)
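
# Illustrative usage (not part of this module): cache up to 100 results for
# ten minutes each; expired entries are treated as misses and recomputed.
# `get_weather` is a hypothetical example function.
#
#     @ttl_cache(maxsize=100, ttl=600)
#     def get_weather(city):
#         ...  # e.g. call a remote API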