How to use the cachetools.func.lru_cache function

To help you get started, we’ve selected a few cachetools examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github opendatacube / datacube-core / datacube / index / _products.py View on Github external
def __init__(self, db, metadata_type_resource):
        """
        :type db: datacube.drivers.postgres._connections.PostgresDb
        :type metadata_type_resource: datacube.index._metadata_types.MetadataTypeResource
        """
        self._db = db
        self.metadata_type_resource = metadata_type_resource

        self.get_unsafe = lru_cache()(self.get_unsafe)
        self.get_by_name_unsafe = lru_cache()(self.get_by_name_unsafe)
github NervanaSystems / ngraph-neon / ngraph / frontends / onnx / onnx_importer / model_wrappers.py View on Github external
    @lru_cache(maxsize=1)
    def get_ng_axes(self):  # type: () -> Axes
        """
        Build an ngraph Axes object mirroring this value's tensor shape.

        :raises NotImplementedError: for sparse tensors or symbolic (named) dims.
        """
        # A non-UNDEFINED elem_type on sparse_tensor_type means this value is
        # a sparse tensor, which we cannot represent yet.
        if self.type.sparse_tensor_type.elem_type != onnx_pb2.TensorProto.UNDEFINED:
            raise NotImplementedError('Sparse tensors (SparseTensorTypeProto) not supported yet.')

        dims = self.type.tensor_type.shape.dim
        if any(d.dim_param for d in dims):
            raise NotImplementedError('Symbolic variable representation of '
                                      'tensor shape (dim_param) not supported yet.')
        shape = [d.dim_value for d in dims]

        return ng.make_axes(axes=make_pos_axes(shape))
github kootenpv / spacy_api / spacy_api / api.py View on Github external
@cachetools.func.lru_cache(maxsize=3000000)
def single(document, model="en", embeddings_path=None, attributes=None, local=False):
    """Parse *document* with the selected model; results are memoized.

    When ``local`` is true, the raw parsed document is returned as-is.
    Otherwise each sentence is converted to a list of per-token dicts
    (one entry per requested attribute, values passed through json_safety),
    with root attributes added when "root" was requested.
    """
    attributes = convert_attr(attributes)
    needs_root = "root" in attributes
    nlp_ = get_nlp(model, embeddings_path)
    parsed = nlp_(document)
    if local:
        return parsed
    sentences = []
    for sent in parsed.sents:
        tokenized_sentence = [{attr: json_safety(token, attr) for attr in attributes}
                              for token in sent]
        if needs_root:
            add_root_attribute(tokenized_sentence, sent)
        sentences.append(tokenized_sentence)
    return sentences
github opendatacube / datacube-core / datacube / index / _products.py View on Github external
def __init__(self, db, metadata_type_resource):
        """
        :type db: datacube.drivers.postgres._connections.PostgresDb
        :type metadata_type_resource: datacube.index._metadata_types.MetadataTypeResource
        """
        self._db = db
        self.metadata_type_resource = metadata_type_resource

        # Wrap the lookup methods in an LRU cache on the *instance* rather
        # than decorating them on the class: a class-level lru_cache would key
        # on `self` and keep every instance alive for the cache's lifetime.
        self.get_unsafe = lru_cache()(self.get_unsafe)
        self.get_by_name_unsafe = lru_cache()(self.get_by_name_unsafe)
github facultyai / faculty / faculty / _util / resolvers.py View on Github external
@lru_cache()
def resolve_project_id(session, project=None):
    """Resolve the ID of a project based on ID, name or the current context.

    This helper encapsulates logic for determining a project in three
    situations:

    * If ``None`` is passed as the project, or if no project is passed, the
      project will be inferred from the runtime context (i.e. environment
      variables), and so will correspond to the 'current project' when run
      inside Faculty platform.
    * If a ``uuid.UUID`` or a string containing a valid UUID is passed, this
      will be assumed to be the ID of the project and will be returned.
    * If any other string is passed, the Faculty platform will be queried for
      projects matching that name. If exactly one of that name is accessible to
      the user, its ID will be returned, otherwise a ``ValueError`` will be
      raised.
github quay / quay / data / model / _basequery.py View on Github external
@lru_cache(maxsize=1)
def _lookup_team_roles():
    """Load every TeamRole row and index it by name (computed once, memoized)."""
    roles_by_name = {}
    for role in TeamRole.select():
        roles_by_name[role.name] = role
    return roles_by_name
github NervanaSystems / ngraph-neon / ngraph / frontends / onnx / onnx_importer / model_wrappers.py View on Github external
    @lru_cache(maxsize=1)
    def get_ng_variable(self):  # type: () -> Op
        """
        Create an ngraph variable node for this value.
        :return: AssignableTensorOp
        """
        var_kwargs = {'axes': self.get_ng_axes(), 'dtype': self.get_dtype()}
        if self.has_initializer:
            # Seed the variable with the tensor stored in the model initializer.
            var_kwargs['initial_value'] = self.get_initializer().to_array()
        return ng.variable(**var_kwargs).named(self.name)
github tensorflow / lucid / lucid / modelzoo / aligned_activations.py View on Github external
@lru_cache()
def layer_covariance(layer1, layer2=None):
    """Computes the covariance matrix between the neurons of two layers. If only one
    layer is passed, computes the symmetric covariance matrix of that layer."""
    layer2 = layer2 or layer1
    act1, act2 = layer1.activations, layer2.activations
    num_datapoints = act1.shape[0]  # cast to avoid numpy type promotion during division
    return np.matmul(act1.T, act2) / float(num_datapoints)
github quay / quay / endpoints / web.py View on Github external
@lru_cache(maxsize=1)
def _get_route_data():
    """Build the swagger route data once and memoize it for later calls."""
    route_data = swagger_route_data(include_internal=True, compact=True)
    return route_data
github quay / quay / data / model / label.py View on Github external
@lru_cache(maxsize=1)
def get_label_source_types():
    """Bidirectional lookup of label source kinds: id -> name and name -> id.

    Computed once from the LabelSourceType table and memoized.
    """
    source_type_map = {}
    for kind in LabelSourceType.select():
        # Insert both directions together so callers can resolve either way.
        source_type_map.update(((kind.id, kind.name), (kind.name, kind.id)))
    return source_type_map