How to use the cachetools.Cache class in cachetools

To help you get started, we've selected a few cachetools.Cache examples, based on popular ways it is used in public projects.

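Before the examples, here is a minimal sketch of what cachetools.Cache itself provides: a mutable mapping with a fixed maximum size. The snippet assumes a recent cachetools release; the keys and values are illustrative.

import cachetools

# Cache behaves like a bounded dict: currsize tracks the total size of
# stored items, maxsize caps it.
cache = cachetools.Cache(maxsize=2)
cache['a'] = 1
cache['b'] = 2
print(cache.currsize, '/', cache.maxsize)  # 2 / 2

# Inserting a third item would exceed maxsize, so the cache makes room
# by calling popitem(); subclasses override popitem() to implement the
# eviction policies (LRU, LFU, RR, TTL) shown in the snippets below.
cache['c'] = 3
print(len(cache))  # still 2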

github tkem / cachetools / tests / test_wrapper.py
    def cache(self, minsize):
        # Test fixture: a plain Cache bounded to `minsize` items.
        return cachetools.Cache(maxsize=minsize)
github tkem / cachetools / tests / test_cache.py
import unittest

import cachetools

from . import CacheTestMixin


class CacheTest(unittest.TestCase, CacheTestMixin):

    Cache = cachetools.Cache
github tkem / cachetools / cachetools.py
import operator

# `Cache` (the base class referenced below) is defined earlier in
# cachetools.py.

class LFUCache(Cache):
    """Least Frequently Used (LFU) cache implementation.

    This class counts how often an item is retrieved, and discards the
    items used least often to make space when necessary.

    """

    def __init__(self, maxsize, getsizeof=None):
        # Entries are stored as [value, use_count] lists, so a user-supplied
        # getsizeof must be wrapped to measure only the value element.
        if getsizeof is not None:
            Cache.__init__(self, maxsize, lambda e: getsizeof(e[0]))
        else:
            Cache.__init__(self, maxsize)

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        # Bump the entry's use count on every access and return the value.
        entry = cache_getitem(self, key)
        entry[1] += 1
        return entry[0]

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        cache_setitem(self, key, [value, 0])

    def popitem(self):
        """Remove and return the `(key, value)` pair least frequently used."""
        items = ((key, Cache.__getitem__(self, key)[1]) for key in self)
        try:
            key, _ = min(items, key=operator.itemgetter(1))
        except ValueError:
            raise KeyError('cache is empty')
        return (key, self.pop(key))


class LRUCache(Cache):
    """Least Recently Used (LRU) cache implementation.

    This class discards the least recently used items first to make
    space when necessary.

    """

    def __init__(self, maxsize, getsizeof=None):
        if getsizeof is not None:
            Cache.__init__(self, maxsize, lambda e: getsizeof(e[0]))
        else:
            Cache.__init__(self, maxsize)
        # _Link is a doubly-linked list node defined elsewhere in
        # cachetools.py; the circular root node anchors the recency order.
        root = _Link()
        root.prev = root.next = root
        self.__root = root
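To see the two policies side by side, here is a short demo using the public LFUCache and LRUCache classes; the keys and access pattern are illustrative.

import cachetools

# LFU: the least frequently retrieved item is evicted first.
lfu = cachetools.LFUCache(maxsize=2)
lfu['a'] = 1
lfu['b'] = 2
_ = lfu['a']        # 'a' now has a higher use count than 'b'
lfu['c'] = 3        # evicts 'b', the least frequently used key
print(sorted(lfu))  # ['a', 'c']

# LRU: the item accessed longest ago is evicted first.
lru = cachetools.LRUCache(maxsize=2)
lru['a'] = 1
lru['b'] = 2
_ = lru['a']        # 'a' is now the most recently used
lru['c'] = 3        # evicts 'b', the least recently used key
print(sorted(lru))  # ['a', 'c']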
github vaexio / vaex / packages / vaex-core / vaex / caching.py
		# Tail of a write method: remember where this key's array is stored.
		self.key_to_path[key] = path

	def __delitem__(self, key):
		logger.debug("delete %r", key)
		path = self.path(key)
		os.remove(path)
		del self.key_to_path[key]

if __name__ == "__main__":
	logger.setLevel("DEBUG")
	def f(key):
		a, b = key
		return np.arange(a, b)
	np_dict = NumpyFileDict()
	#np_dict[(1,2)] = np.arange(10)
	# vaex's Cache subclass (old cachetools-style API): at most 2 entries
	# in memory, `missing=f` computes values on a miss, and `dict_value`
	# appears to supply the NumpyFileDict as on-disk backing storage.
	cache = Cache(2, missing=f, dict_value=np_dict)
	print("cache[1,3] =", cache[(1, 3)], "...")
	print("cache[1,13] =", cache[(1, 13)], "...")
	print(cache[(2, 3)])
	print(cache[(3, 4)])
	print(cache[(3, 5)])
	print(cache[(3, 6)])
	print("keys", cache.keys())
	for ar in np_dict:
		print("-->", ar)
github tkem / cachetools / cachetools.py
    @property
    def maxsize(self):
        """Return the maximum size of the cache."""
        return self.__maxsize

    @property
    def currsize(self):
        """Return the current size of the cache."""
        return self.__currsize

    @staticmethod
    def getsizeof(value):
        """Return the size of a cache element."""
        return 1


class RRCache(Cache):
    """Random Replacement (RR) cache implementation.

    This class randomly selects candidate items and discards them to
    make space when necessary.

    """

    def popitem(self):
        """Remove and return a random `(key, value)` pair."""
        try:
            key = random.choice(list(self))
        except IndexError:
            raise KeyError('cache is empty')
        return (key, self.pop(key))
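Two knobs from this excerpt deserve a quick illustration: getsizeof lets a cache weigh items by something other than a count of 1, and RRCache shows how little code a custom eviction policy needs. The keys and sizes below are illustrative.

import cachetools

# Weigh items by string length: the cache holds at most 10 characters.
cache = cachetools.Cache(maxsize=10, getsizeof=len)
cache['k1'] = 'abc'                   # currsize becomes 3
cache['k2'] = 'defg'                  # currsize becomes 7
print(cache.currsize, cache.maxsize)  # 7 10

# Random replacement: with maxsize=2, a third insert evicts one of the
# existing keys at random via the popitem() shown above.
rr = cachetools.RRCache(maxsize=2)
rr['a'] = 1
rr['b'] = 2
rr['c'] = 3
print(len(rr))  # 2 -- either 'a' or 'b' was discarded at random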
github gramener / gramex / gramex / services / ttlcache.py
    def pop(self, *args, **kwargs):
        # Pin the timer so the whole call sees one consistent time, then
        # delegate to the base Cache implementation.
        with self.__timer:
            return Cache.pop(self, *args, **kwargs)
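The __timer context manager here appears to pin the clock for the duration of the call; the same wrap-and-delegate shape is also how you would add locking around a cache shared between threads. A hypothetical sketch (LockedLRUCache and its lock are assumptions, not gramex code):

import threading
import cachetools

class LockedLRUCache(cachetools.LRUCache):
    # Hypothetical: serialize mutating calls on a shared cache.
    def __init__(self, maxsize):
        super().__init__(maxsize)
        self._lock = threading.Lock()

    def pop(self, *args, **kwargs):
        with self._lock:
            return cachetools.LRUCache.pop(self, *args, **kwargs)

    def __setitem__(self, key, value):
        with self._lock:
            cachetools.LRUCache.__setitem__(self, key, value)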
github tensorflow / federated / tensorflow_federated / python / core / impl / caching_executor.py
  def __init__(self, target_executor, cache=None):
    """Creates a new instance of this executor.

    Args:
      target_executor: An instance of `executor_base.Executor`.
      cache: The cache to use (must be an instance of `cachetools.Cache`). If
        unspecified, by default we construct a 1000-element LRU cache.
    """
    py_typecheck.check_type(target_executor, executor_base.Executor)
    if cache is not None:
      py_typecheck.check_type(cache, cachetools.Cache)
    else:
      cache = cachetools.LRUCache(_DEFAULT_CACHE_SIZE)
    self._target_executor = target_executor
    self._cache = cache
    self._num_values_created = 0
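The check-or-default pattern above is easy to reuse in your own APIs: accept any cachetools.Cache, validate it, and fall back to a bounded LRU cache. A sketch under that assumption (MemoizingClient and the size constant are illustrative, not TFF code):

import cachetools

_DEFAULT_CACHE_SIZE = 1000  # illustrative, mirroring the constant above

class MemoizingClient:
    def __init__(self, cache=None):
        if cache is not None and not isinstance(cache, cachetools.Cache):
            raise TypeError('cache must be a cachetools.Cache instance')
        # Default to a bounded LRU cache, as the executor above does.
        self._cache = cache if cache is not None else cachetools.LRUCache(_DEFAULT_CACHE_SIZE)

client = MemoizingClient(cache=cachetools.LFUCache(maxsize=256))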
github gramener / gramex / gramex / services / ttlcache.py
    def expire(self, time=None):
        """Remove expired items from the cache."""
        # Walk the expiry-ordered linked list from the root, unlinking
        # and deleting every entry whose deadline has already passed.
        if time is None:
            time = self.__timer()
        root = self.__root
        curr = root.next
        links = self.__links
        cache_delitem = Cache.__delitem__
        while curr is not root and curr.expire < time:
            cache_delitem(self, curr.key)
            del links[curr.key]
            next = curr.next
            curr.unlink()
            curr = next
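For completeness: this expiry machinery is exposed on the public cachetools.TTLCache, which offers essentially the same expire() method. A minimal sketch with a fake, manually advanced clock (the timer lambda is an assumption for demonstration; real code would normally keep the default time.monotonic):

import cachetools

clock = [0.0]  # fake clock, advanced by hand
cache = cachetools.TTLCache(maxsize=10, ttl=5, timer=lambda: clock[0])

cache['a'] = 1
clock[0] = 3.0
print('a' in cache)  # True: only 3 of the 5 ttl seconds have passed

clock[0] = 6.0
cache.expire()       # drops every entry whose deadline has passed
print('a' in cache)  # False: 'a' expired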