# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_decorator(self):
    """Decorating a function with ``cachetools.cached`` memoizes its results.

    Verifies that:
      * a fresh wrapper has not populated the cache and exposes the
        original function via ``__wrapped__``;
      * each new argument adds exactly one entry, stored under a
        ``cachetools.keys.hashkey`` key;
      * equal-hashing arguments (``1`` and ``1.0`` — ``hashkey(1) ==
        hashkey(1.0)``) share a single cache slot, so repeated and
        float-equal calls are served from the cache without invoking
        the underlying call-counting function again.

    NOTE(review): the original duplicated this entire round a second
    time while still asserting ``len(cache) == 0`` on the already
    populated cache — a copy-paste artifact that could never pass; the
    duplicate has been removed.
    """
    cache = self.cache(2)
    wrapper = cachetools.cached(cache)(self.func)

    # A fresh wrapper has not touched the cache yet.
    self.assertEqual(len(cache), 0)
    self.assertEqual(wrapper.__wrapped__, self.func)

    # First distinct argument: one cache entry, keyed by hashkey(0).
    self.assertEqual(wrapper(0), 0)
    self.assertEqual(len(cache), 1)
    self.assertIn(cachetools.keys.hashkey(0), cache)
    self.assertNotIn(cachetools.keys.hashkey(1), cache)
    self.assertNotIn(cachetools.keys.hashkey(1.0), cache)

    # Second distinct argument: second entry; 1 and 1.0 hash equally,
    # so hashkey(1.0) is already present.
    self.assertEqual(wrapper(1), 1)
    self.assertEqual(len(cache), 2)
    self.assertIn(cachetools.keys.hashkey(0), cache)
    self.assertIn(cachetools.keys.hashkey(1), cache)
    self.assertIn(cachetools.keys.hashkey(1.0), cache)

    # Cache hits: neither the size nor the returned value changes.
    self.assertEqual(wrapper(1), 1)
    self.assertEqual(len(cache), 2)
    self.assertEqual(wrapper(1.0), 1)
    self.assertEqual(len(cache), 2)
    self.assertEqual(wrapper(1.0), 1)
    self.assertEqual(len(cache), 2)
def _robust_key(*args, **kwargs):
    """Build a hashable cache key, tolerating a dict-valued ``proxies`` kwarg.

    ``keys.hashkey`` requires every argument to be hashable, so a
    ``proxies`` mapping is serialized to JSON first.  ``sort_keys=True``
    makes the serialization canonical: equal proxy mappings with
    different insertion order now produce the same cache key (the
    original, unsorted dump caused spurious cache misses).
    """
    if 'proxies' in kwargs:
        # kwargs is a fresh dict per call, so in-place mutation is safe.
        kwargs['proxies'] = json.dumps(kwargs['proxies'], sort_keys=True)
    return keys.hashkey(*args, **kwargs)
# NOTE(review): this fragment is truncated in the extract — the final
# ``except ValueError:`` handler has no body here, and all indentation
# has been stripped. Confirm the tail (presumably it ignores unhashable
# or oversized values and returns ``value``) against the full file.
def cachedcoromethod(cache, key=keys.hashkey):
"""
Decorator factory: cache results of a coroutine method in *cache*.

Args:
    cache: mutable mapping that stores computed results.
    key: callable that builds a cache key from the call arguments;
        ``self`` is deliberately excluded from the key, so all
        instances share entries — TODO confirm this is intended.
"""
def decorator(method):
# NOTE(review): functools.wraps is not applied, so the wrapper does
# not preserve the wrapped method's name/docstring.
async def wrapper(self, *args, **kwargs):
k = key(*args, **kwargs)
try:
# EAFP: serve a cache hit without awaiting the coroutine.
return cache[k]
except KeyError:
pass
# Cache miss: compute the value, then try to store it.
value = await method(self, *args, **kwargs)
try:
cache[k] = value
except ValueError:
@cached(cache={}, key=lambda db, ds_id: hashkey(ds_id))
def _get_coords(db, ds_id):
@cachetools.cached(cachetools.TTLCache(100, 60*30), key=lambda wg, url: cachetools.keys.hashkey(url))
def get_spage(wg, url):
@cached(LRU_CACHE, key=functools.partial(hashkey, "spacy_lang"))
def load_spacy_lang(name, disable=None, allow_blank=False):
"""
Load a spaCy ``Language``: a shared vocabulary and language-specific data
for tokenizing text, and (if available) model data and a processing pipeline
containing a sequence of components for annotating a document.
An LRU cache saves languages in memory.
Args:
name (str or :class:`pathlib.Path`): spaCy language to load.
Could be a shortcut link, full package name, or path to model directory,
or a 2-letter ISO language code for which spaCy has language data.
disable (Tuple[str]): Names of pipeline components to disable, if any.
.. note:: Although spaCy's API specifies this argument as a list,
here we require a tuple. Pipelines are stored in the LRU cache
with unique identifiers generated from the hash of the function
@cached(cache.LRU_CACHE, key=functools.partial(hashkey, "char_weights"))
def get_char_weights(lang):
"""
Get lang-specific character weights for use in certain data augmentation transforms,
based on texts in :class:`textacy.datasets.UDHR`.
Args:
lang (str): Standard two-letter language code.
Returns:
List[Tuple[str, int]]: Collection of (character, weight) pairs, based on
the distribution of characters found in the source text.
"""
try:
char_weights = list(
collections.Counter(
char