How to use the cachetools.func module in cachetools

To help you get started, we’ve selected a few cachetools.func examples, based on popular ways the library is used in public projects.

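The cachetools.func module provides functools.lru_cache-style decorators with different eviction policies: the four shown below are lfu_cache, lru_cache, rr_cache and ttl_cache. As a starting point, here is a minimal sketch of the most common one, ttl_cache, which evicts entries after a time-to-live in seconds (the function and values are illustrative, not taken from the projects below):

import cachetools.func

@cachetools.func.ttl_cache(maxsize=128, ttl=600)
def load_settings(name):
    # Expensive lookup; repeated calls with the same name within
    # 600 seconds return the cached result instead of recomputing.
    print("loading", name)
    return {"name": name}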

github tkem / cachetools / tests / test_func.py
self.assertEqual(cached.cache_info(), (2, 1, 128, 1))


class LFUDecoratorTest(unittest.TestCase, DecoratorTestMixin):

    DECORATOR = staticmethod(cachetools.func.lfu_cache)


class LRUDecoratorTest(unittest.TestCase, DecoratorTestMixin):

    DECORATOR = staticmethod(cachetools.func.lru_cache)


class RRDecoratorTest(unittest.TestCase, DecoratorTestMixin):

    DECORATOR = staticmethod(cachetools.func.rr_cache)


class TTLDecoratorTest(unittest.TestCase, DecoratorTestMixin):

    DECORATOR = staticmethod(cachetools.func.ttl_cache)
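
All four decorators expose the functools-style cache_info() and cache_clear() helpers, which is what the assertion at the top of this snippet checks: the tuple reads (hits, misses, maxsize, currsize). A standalone illustration (the function is hypothetical, not from the test suite):

import cachetools.func

@cachetools.func.lru_cache(maxsize=128)
def square(n):
    return n * n

square(2)
square(2)  # hit
square(3)
print(square.cache_info())  # CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)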

github goldmansachs / gs-quant / gs_quant / timeseries / measures.py
@cachetools.func.ttl_cache()  # fine as long as availability is not different between users
def _var_swap_tenors(asset: Asset):
    from gs_quant.session import GsSession

    aid = asset.get_marquee_id()
    body = GsSession.current._get(f"/data/markets/{aid}/availability")
    for r in body['data']:
        if r['dataField'] == Fields.VAR_SWAP.value:
            for f in r['filteredFields']:
                if f['field'] == Fields.TENOR.value:
                    return f['values']
    raise MqValueError("var swap is not available for " + aid)
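
Called with no arguments, ttl_cache() falls back to its defaults (maxsize=128, ttl=600 seconds), and the cache is process-global, which is why the comment above matters: every session in the process shares the same entries. If cached availability ever did need to be invalidated, the functools-style cache_clear() hook resets it (sketch, assuming the decorated function above):

_var_swap_tenors.cache_clear()  # drop every cached availability result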

github SBRG / ssbio / ssbio / structure / properties / pdbstart.py
@cachetools.func.ttl_cache(maxsize=1000)
def get_pdb_res_starts(pdb_file):
    """Return a dictionary of the first residue number in each chain of a PDB file

    Args:
        pdb_file: path to PDB file

    Returns:
        start_residues: dictionary of {chainID: firstResNum, ...}

    """
    my_structure = PDBIOExt(pdb_file)
    model = my_structure.first_model

    start_residues = {}
    for chain in model:
        residues = chain.get_residues()
        # The generator yields residues in order, so the first one per
        # chain carries the starting residue number
        start_residues[chain.get_id()] = next(residues).get_id()[1]

    return start_residues

github gnocchixyz / gnocchi / gnocchi / cli / metricd.py
def __init__(self, worker_id, conf):
        super(MetricProcessor, self).__init__(
            worker_id, conf, conf.metricd.metric_processing_delay)
        self._tasks = []
        self.group_state = None
        self.sacks_with_measures_to_process = set()
        # This stores the last time the processor did a full scan of the
        # sacks it is responsible for
        self._last_full_sack_scan = utils.StopWatch().start()
        # Only update the list of sacks to process every
        # metric_processing_delay
        self._get_sacks_to_process = cachetools.func.ttl_cache(
            ttl=conf.metricd.metric_processing_delay
        )(self._get_sacks_to_process)
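
Worth noting in this snippet: ttl_cache is applied at runtime, as ttl_cache(ttl=...)(method), rather than with decorator syntax, so the TTL can come from configuration. A self-contained sketch of the same pattern (the class and names are hypothetical):

import cachetools.func

class Processor:
    def __init__(self, refresh_interval):
        # Re-wrap the bound method so calls within the TTL reuse the
        # cached sack list instead of rescanning.
        self._get_sacks_to_process = cachetools.func.ttl_cache(
            ttl=refresh_interval
        )(self._get_sacks_to_process)

    def _get_sacks_to_process(self):
        print("expensive scan")
        return {1, 2, 3}

p = Processor(refresh_interval=30)
p._get_sacks_to_process()  # scans
p._get_sacks_to_process()  # cached until 30 seconds pass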

github SBRG / ssbio / ssbio / organisms / ecoli.py
@cachetools.func.ttl_cache(maxsize=500)
def uniprot_info(bnumber):
    """Get the mapped UniProt ID and sequence for an E. coli locus id

    Args:
        bnumber: E. coli locus ID

    Returns:
        tuple of (uniprot ID, sequence)
    """
    uniprot_id = convert_bnumber_to_uniprot(bnumber)
    uniprot_seq = ssbio.databases.uniprot.get_fasta(uniprot_id)
    return uniprot_id, uniprot_seq

github lossme / TencentComicBook / api / views.py
@cachetools.func.ttl_cache(maxsize=1024, ttl=3600, typed=False)
def get_comicbook(site, comicid):
    return ComicBook.create_comicbook(site=site, comicid=comicid)
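
The typed flag mirrors functools.lru_cache: at its default of False, arguments that compare equal share a cache entry; with typed=True, 1 and 1.0 would be cached separately. A quick illustration (standalone, not from the project):

import cachetools.func

@cachetools.func.ttl_cache(maxsize=1024, ttl=3600, typed=True)
def describe(value):
    return "{} is a {}".format(value, type(value).__name__)

print(describe(1))    # miss: cached under the int key
print(describe(1.0))  # miss again: typed=True keys int and float apart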

github ethereum / trinity / trinity / components / builtin / tx_pool / validators.py
    @cachetools.func.ttl_cache(maxsize=1024, ttl=300)
    def get_appropriate_tx_class(self) -> Type[SignedTransactionAPI]:
        head = self.chain.get_canonical_head()
        current_tx_class = self.chain.get_vm_class(head).get_transaction_class()

        # If the current head of the chain is still on a fork that is before the currently
        # active fork (syncing), ensure that we use the specified initial tx class
        if self.is_outdated_tx_class(current_tx_class):
            return self._initial_tx_class

        return current_tx_class
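
Decorating an instance method like this puts self into the cache key, so each instance gets its own entries, and the cache holds a reference to the instance until the entry expires; the 300-second TTL bounds that here. A sketch of the behavior (hypothetical class):

import cachetools.func

class Pool:
    @cachetools.func.ttl_cache(maxsize=1024, ttl=300)
    def current_state(self):
        # self is part of the key, so entries are per-instance
        return object()

a, b = Pool(), Pool()
assert a.current_state() is a.current_state()      # hit: same instance
assert a.current_state() is not b.current_state()  # distinct keys per instance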

github ReutersMedia / sqs-browser-events / lib / dynamo_sessions.py
@cachetools.func.ttl_cache(maxsize=10, ttl=30)
def get_all_sessions(max_expired_age=None):
    q = {'Select': 'ALL_ATTRIBUTES'}
    if max_expired_age is not None:
        q['FilterExpression'] = Attr('expires').gte(int(time.time() - max_expired_age))
    return collect_results(get_session_table().scan,q)

github SBRG / ssbio / ssbio / gempro / qcqa.py
@cachetools.func.ttl_cache(maxsize=500, ttl=SEVEN_DAYS)
def get_best_structures(uniprot_id):
    """Use the PDBe REST service to query for the best PDB structures for a UniProt ID.

    More information found here: https://www.ebi.ac.uk/pdbe/api/doc/sifts.html
    Link used to retrieve results: https://www.ebi.ac.uk/pdbe/api/mappings/best_structures/:accession
    Results are the PDB structures mapping to a UniProt accession, sorted by coverage of the protein and, where coverage ties, by resolution.

    Args:
        uniprot_id: a valid UniProt ID

    Returns:
        A rank-ordered list of dictionaries, which contain these keys:
        pdb_id: the PDB ID which maps to the UniProt ID
        chain_id: the specific chain of the PDB which maps to the UniProt ID
        coverage: the percent coverage of the entire UniProt sequence
        resolution: the resolution of the structure
    """
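
The ttl argument counts seconds under the default time.monotonic timer, so SEVEN_DAYS is presumably defined as 7 * 24 * 60 * 60. Assuming the function returns the list documented above, a caller might use it like this (the accession is illustrative):

best = get_best_structures('P0ABP8')
top = best[0]  # rank-ordered, so the first entry is the best mapping
print(top['pdb_id'], top['chain_id'], top['coverage'], top['resolution'])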

github quay / quay / buildman / manager / executor.py
    @cachetools.func.ttl_cache(ttl=ONE_HOUR)
    def _get_coreos_ami(cls, ec2_region, coreos_channel):
        """
        Retrieve the CoreOS AMI id from the canonical listing.
        """
        stack_list_json = requests.get(EC2Executor.COREOS_STACK_URL % coreos_channel).json()
        stack_amis = stack_list_json['architectures'][EC2Executor.COREOS_STACK_ARCHITECTURE]['images']['aws']['regions']
        return stack_amis[ec2_region]['image']
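
Because this method takes cls, the cache decorator needs to sit beneath @classmethod so it wraps the plain function; cls then participates in the cache key. A sketch of that ordering, assuming ONE_HOUR = 3600 seconds (the class and method names are hypothetical):

import cachetools.func

ONE_HOUR = 3600  # assumed value of the project constant, in seconds

class Executor:
    @classmethod
    @cachetools.func.ttl_cache(ttl=ONE_HOUR)
    def lookup_ami(cls, region):
        # cls is part of the key, so subclasses cache independently
        print("fetching listing for", region)
        return "ami-" + region

Executor.lookup_ami("us-east-1")  # fetches
Executor.lookup_ami("us-east-1")  # cached for up to an hour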