OLD | NEW |
(Empty) | |
| 1 """`functools.lru_cache` compatible memoizing function decorators.""" |
| 2 |
| 3 from __future__ import absolute_import |
| 4 |
| 5 import collections |
| 6 import functools |
| 7 import random |
| 8 import time |
| 9 |
| 10 try: |
| 11 from threading import RLock |
| 12 except ImportError: |
| 13 from dummy_threading import RLock |
| 14 |
| 15 from . import keys |
| 16 from .lfu import LFUCache |
| 17 from .lru import LRUCache |
| 18 from .rr import RRCache |
| 19 from .ttl import TTLCache |
| 20 |
| 21 __all__ = ('lfu_cache', 'lru_cache', 'rr_cache', 'ttl_cache') |
| 22 |
| 23 |
| 24 _CacheInfo = collections.namedtuple('CacheInfo', [ |
| 25 'hits', 'misses', 'maxsize', 'currsize' |
| 26 ]) |
| 27 |
| 28 |
def _cache(cache, typed=False):
    """Return a decorator that memoizes a function's results in *cache*.

    The returned wrapper mimics the `functools.lru_cache` interface,
    exposing `cache_info()` and `cache_clear()` attributes.  All cache
    accesses are serialized through a single re-entrant lock; the wrapped
    function itself is invoked *without* holding the lock.
    """
    def decorator(func):
        make_key = keys.typedkey if typed else keys.hashkey
        lock = RLock()
        # counters[0] = hits, counters[1] = misses; kept in a list so the
        # nested closures can mutate it in place.
        counters = [0, 0]

        def cache_info():
            """Return a CacheInfo tuple with current statistics."""
            with lock:
                hits, misses = counters
                return _CacheInfo(hits, misses, cache.maxsize, cache.currsize)

        def cache_clear():
            """Empty the cache and reset the hit/miss counters."""
            with lock:
                try:
                    cache.clear()
                finally:
                    # Reset counters even if clear() raises.
                    counters[0] = counters[1] = 0

        def wrapper(*args, **kwargs):
            k = make_key(*args, **kwargs)
            with lock:
                try:
                    result = cache[k]
                except KeyError:
                    counters[1] += 1
                else:
                    counters[0] += 1
                    return result
            # Cache miss: call the function outside the lock so that
            # concurrent callers are not serialized on a slow function.
            result = func(*args, **kwargs)
            try:
                with lock:
                    cache[k] = result
            except ValueError:
                pass  # value too large to store
            return result

        functools.update_wrapper(wrapper, func)
        if not hasattr(wrapper, '__wrapped__'):
            wrapper.__wrapped__ = func  # Python 2.7
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return wrapper
    return decorator
| 72 |
| 73 |
def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.

    For compatibility with `functools.lru_cache`, this may also be
    applied as a bare decorator (``@lfu_cache``), in which case the
    default `maxsize` of 128 is used.
    """
    if callable(maxsize):
        # Bare decorator usage: `maxsize` is actually the wrapped function.
        return _cache(LFUCache(128), typed)(maxsize)
    return _cache(LFUCache(maxsize), typed)
| 81 |
| 82 |
def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.

    For compatibility with `functools.lru_cache`, this may also be
    applied as a bare decorator (``@lru_cache``), in which case the
    default `maxsize` of 128 is used.
    """
    if callable(maxsize):
        # Bare decorator usage: `maxsize` is actually the wrapped function.
        return _cache(LRUCache(128), typed)(maxsize)
    return _cache(LRUCache(maxsize), typed)
| 90 |
| 91 |
def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.

    `choice` selects the item to evict when the cache is full.  For
    compatibility with `functools.lru_cache`, this may also be applied
    as a bare decorator (``@rr_cache``), in which case the default
    `maxsize` of 128 is used.
    """
    if callable(maxsize):
        # Bare decorator usage: `maxsize` is actually the wrapped function.
        return _cache(RRCache(128, choice), typed)(maxsize)
    return _cache(RRCache(maxsize, choice), typed)
| 99 |
| 100 |
def ttl_cache(maxsize=128, ttl=600, timer=time.time, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.

    `ttl` is measured in the units of `timer` (seconds by default).  For
    compatibility with `functools.lru_cache`, this may also be applied
    as a bare decorator (``@ttl_cache``), in which case the default
    `maxsize` of 128 is used.
    """
    if callable(maxsize):
        # Bare decorator usage: `maxsize` is actually the wrapped function.
        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
    return _cache(TTLCache(maxsize, ttl, timer), typed)
OLD | NEW |