func.py

  1. """`functools.lru_cache` compatible memoizing function decorators."""
  2. __all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
  3. import collections
  4. import functools
  5. import math
  6. import random
  7. import time
  8. try:
  9. from threading import RLock
  10. except ImportError: # pragma: no cover
  11. from dummy_threading import RLock
  12. from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
  13. from . import keys
  14. _CacheInfo = collections.namedtuple(
  15. "CacheInfo", ["hits", "misses", "maxsize", "currsize"]
  16. )


class _UnboundCache(dict):
    @property
    def maxsize(self):
        return None

    @property
    def currsize(self):
        return len(self)


class _UnboundTTLCache(TTLCache):
    def __init__(self, ttl, timer):
        TTLCache.__init__(self, math.inf, ttl, timer)

    @property
    def maxsize(self):
        return None


def _cache(cache, typed):
    maxsize = cache.maxsize

    def decorator(func):
        key = keys.typedkey if typed else keys.hashkey
        # hit/miss counters and the lock are shared by all the closures below
        hits = misses = 0
        lock = RLock()

        def wrapper(*args, **kwargs):
            nonlocal hits, misses
            k = key(*args, **kwargs)
            with lock:
                try:
                    v = cache[k]
                    hits += 1
                    return v
                except KeyError:
                    misses += 1
            # call the wrapped function outside the lock so a slow call
            # does not block other threads
            v = func(*args, **kwargs)
            # in case of a race, prefer the item already in the cache
            try:
                with lock:
                    return cache.setdefault(k, v)
            except ValueError:
                return v  # value too large

        def cache_info():
            with lock:
                maxsize = cache.maxsize
                currsize = cache.currsize
            return _CacheInfo(hits, misses, maxsize, currsize)

        def cache_clear():
            nonlocal hits, misses
            with lock:
                try:
                    cache.clear()
                finally:
                    hits = misses = 0

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed}
        functools.update_wrapper(wrapper, func)
        return wrapper

    return decorator
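

# Illustrative sketch (not part of the library source): every decorator below
# returns a wrapper carrying the introspection attributes set up in _cache().
# The function name `add` is a hypothetical example.
#
#     >>> @lru_cache(maxsize=2)
#     ... def add(x, y):
#     ...     return x + y
#     >>> add(1, 2), add(1, 2)
#     (3, 3)
#     >>> add.cache_info()            # the second call was a hit
#     CacheInfo(hits=1, misses=1, maxsize=2, currsize=1)
#     >>> add.cache_parameters()
#     {'maxsize': 2, 'typed': False}
#     >>> add.cache_clear()
#     >>> add.cache_info()
#     CacheInfo(hits=0, misses=0, maxsize=2, currsize=0)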


def fifo_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a First In First Out (FIFO)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(FIFOCache(128), typed)(maxsize)
    else:
        return _cache(FIFOCache(maxsize), typed)
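

# Usage sketch (illustrative, not part of the library): with a FIFO policy the
# oldest *inserted* entry is discarded first, regardless of how often it has
# been used since. `square` is a hypothetical example function.
#
#     >>> @fifo_cache(maxsize=2)
#     ... def square(x):
#     ...     return x * x
#     >>> square(1); square(2); square(1)   # 1 and 2 cached, 1 re-used
#     1
#     4
#     1
#     >>> square(3)                         # evicts 1, the first item inserted
#     9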


def lfu_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Frequently Used (LFU)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(LFUCache(128), typed)(maxsize)
    else:
        return _cache(LFUCache(maxsize), typed)
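

# Usage sketch (illustrative, not part of the library): with an LFU policy the
# entry that has been used least often is discarded first. `square` is a
# hypothetical example function.
#
#     >>> @lfu_cache(maxsize=2)
#     ... def square(x):
#     ...     return x * x
#     >>> square(1); square(1); square(2)   # 1 used twice, 2 used once
#     1
#     1
#     4
#     >>> square(3)                         # evicts 2, the least frequently used
#     9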


def lru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(LRUCache(128), typed)(maxsize)
    else:
        return _cache(LRUCache(maxsize), typed)
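

# Usage sketch (illustrative, not part of the library): lru_cache() mirrors the
# functools.lru_cache() interface; maxsize=None gives an unbounded cache. `fib`
# is a hypothetical example function, and the hit/miss counts shown are what a
# fresh interpreter would report.
#
#     >>> @lru_cache(maxsize=None)
#     ... def fib(n):
#     ...     return n if n < 2 else fib(n - 1) + fib(n - 2)
#     >>> fib(10)
#     55
#     >>> fib.cache_info()
#     CacheInfo(hits=8, misses=11, maxsize=None, currsize=11)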


def mru_cache(maxsize=128, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Most Recently Used (MRU)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(MRUCache(128), typed)(maxsize)
    else:
        return _cache(MRUCache(maxsize), typed)
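

# Usage sketch (illustrative, not part of the library): with an MRU policy the
# *most* recently used entry is discarded first, so long-untouched entries
# survive. `square` is a hypothetical example function.
#
#     >>> @mru_cache(maxsize=2)
#     ... def square(x):
#     ...     return x * x
#     >>> square(1); square(2); square(1)   # 1 is now the most recently used
#     1
#     4
#     1
#     >>> square(3)                         # evicts 1; the older entry 2 survives
#     9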


def rr_cache(maxsize=128, choice=random.choice, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Random Replacement (RR)
    algorithm.
    """
    if maxsize is None:
        return _cache(_UnboundCache(), typed)
    elif callable(maxsize):
        return _cache(RRCache(128, choice), typed)(maxsize)
    else:
        return _cache(RRCache(maxsize, choice), typed)
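

# Usage sketch (illustrative, not part of the library): with random replacement
# an arbitrary cached entry is discarded once the cache is full; `choice`
# selects the item to evict and defaults to random.choice. Passing a seeded
# generator's choice makes eviction reproducible, e.g. in tests. `square` is a
# hypothetical example function.
#
#     >>> import random
#     >>> @rr_cache(maxsize=32, choice=random.Random(42).choice)
#     ... def square(x):
#     ...     return x * x
#     >>> square(3)
#     9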


def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False):
    """Decorator to wrap a function with a memoizing callable that saves
    up to `maxsize` results based on a Least Recently Used (LRU)
    algorithm with a per-item time-to-live (TTL) value.
    """
    if maxsize is None:
        return _cache(_UnboundTTLCache(ttl, timer), typed)
    elif callable(maxsize):
        return _cache(TTLCache(128, ttl, timer), typed)(maxsize)
    else:
        return _cache(TTLCache(maxsize, ttl, timer), typed)
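

# Usage sketch (illustrative, not part of the library): cached values expire
# `ttl` time units after they are stored, measured by `timer` (seconds with the
# default time.monotonic). A counter-based timer makes expiry deterministic for
# testing; `clock` and `now` are hypothetical names.
#
#     >>> now = 0
#     >>> def clock():
#     ...     return now
#     >>> @ttl_cache(maxsize=128, ttl=10, timer=clock)
#     ... def square(x):
#     ...     return x * x
#     >>> square(2)                  # cached at time 0
#     4
#     >>> now = 11                   # past the 10-unit TTL
#     >>> square(2)                  # recomputed, counts as a miss
#     4
#     >>> square.cache_info()
#     CacheInfo(hits=0, misses=2, maxsize=128, currsize=1)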