How to use the klepto.archives.file_archive function in klepto

To help you get started, we've selected a few klepto examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github uqfoundation / klepto / tests / test_cache_info.py View on Github external
def _test_hits(algorithm, maxsize=20, keymap=None,
               rangelimit=5, tries=1000, archived=False):

    @algorithm(maxsize=maxsize, keymap=keymap, purge=True)
    def f(x, y):
        return 3*x+y

    if archived:
        f.archive(file_archive('cache.pkl',cached=False))

    domain = list(range(rangelimit))
    domain += [float(i) for i in domain]
    for i in range(tries):
        r = f(choice(domain), choice(domain))

    f.dump()
   #print(f.info())
    return f.info()
github miurahr / pykakasi / src / pykakasi / scripts.py View on Github external
def __init__(self, dictname):
    """Open the serialized klepto file archive for *dictname* and load it.

    The archive path is resolved through the project's Configurations
    helper; the loaded mapping is kept on ``self._dict``.
    """
    archive_path = Configurations().dictpath(dictname)
    self._dict = file_archive(archive_path, {}, serialized=True)
    self._dict.load()
github JoergFranke / recnet / recnet / model_master.py View on Github external
def load(self, data_location):
    """Read saved model parameters from a klepto file archive.

    Returns a 4-tuple ``(weights, basic, struct, optimize)`` taken from
    the archive keys 'layer_weights', 'p_basic', 'p_struct' and
    'p_optimize'.  Raises ``Warning`` when *data_location* is not an
    existing file (kept as-is for backward compatibility with callers
    that catch it).
    """
    if not os.path.isfile(data_location):
        raise Warning("No saved parameters found")

    archive = klepto.archives.file_archive(data_location, cached=True, serialized=True)
    archive.load()
    try:
        return (archive['layer_weights'],
                archive['p_basic'],
                archive['p_struct'],
                archive['p_optimize'])
    finally:
        # Drop the in-memory copy once the values have been pulled out.
        archive.clear()
github miurahr / pykakasi / src / pykakasi / kanji.py View on Github external
def __init__(self):
        """Lazily load the shared kanwa dictionary table on first use.

        Uses double-checked locking: the table is tested once without the
        lock for speed, then re-tested while holding it so only one thread
        performs the load.  NOTE(review): assumes ``_jisyo_table`` and
        ``_lock`` are class-level attributes shared across instances —
        confirm in the enclosing class (not visible here).
        """
        if self._jisyo_table is None:
            with self._lock:
                # Re-check under the lock: another thread may have finished
                # loading while this one was waiting.
                if self._jisyo_table is None:
                    dictpath = Configurations().dictpath(Configurations().jisyo_kanwa)
                    self._jisyo_table = file_archive(dictpath, {}, serialized=True)
                    self._jisyo_table.load()
github shayneobrien / explicit-gan-eval / models / mnist_utils.py View on Github external
# Make a dataset out of the autoencoded images, copy attributes
            autoencoded_data = list_obj(results)
            autoencoded_data.__dict__ = dataset.__dict__.copy()

            # Store into dictionary
            cache[str(count)] = autoencoded_data

            # Reinitialize results for the next dataset
            results = []

        cache.dump()

    else:

        # Load data
        cache = file_archive(save_path + '/cached_preds.txt')
        cache.load()

    return cache["0"], cache["1"]