How to use the peru.cache module in peru

To help you get started, we've selected a few peru.cache examples based on popular ways it is used in public projects.


From buildinspace/peru, tests/test_sync.py:
    def test_no_op_sync(self):  # hypothetical name; the original snippet begins mid-method
        '''A no-op sync should be a single git command. Also check that index
        files are deleted after any sync error.'''
        module_dir = shared.create_dir({'foo': 'bar'})
        self.write_yaml(
            '''\
            cp module foo:
                path: {}

            imports:
                foo: subdir
            ''', module_dir)
        index_path = os.path.join(self.test_dir, '.peru/lastimports.index')

        # The first sync should take multiple operations and create a
        # lastimports.index file.
        peru.cache.DEBUG_GIT_COMMAND_COUNT = 0
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})
        assert peru.cache.DEBUG_GIT_COMMAND_COUNT > 1, \
            'The first sync should take multiple operations.'
        assert os.path.exists(index_path), \
            'The first sync should create an index file.'

        # The second sync should reuse the index file and only take one
        # operation.
        peru.cache.DEBUG_GIT_COMMAND_COUNT = 0
        self.do_integration_test(['sync'], {'subdir/foo': 'bar'})
        assert peru.cache.DEBUG_GIT_COMMAND_COUNT == 1, \
            'The second sync should take only one operation.'
        assert os.path.exists(index_path), \
            'The second sync should preserve the index file.'

        # Now force an error. This should delete the index file.
        with open(os.path.join(self.test_dir, 'subdir/foo'), 'w') as f:
            f.write('dirty')
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            run_peru_command(['sync'], self.test_dir)
        assert not os.path.exists(index_path), \
            'The error should delete the index file.'

        # Fix the error and resync with new module contents. This should
        # recreate the index file with the current tree and then succeed,
        # rather than using an empty index and treating the current files as
        # conflicting.
        with open(os.path.join(self.test_dir, 'subdir/foo'), 'w') as f:
            f.write('bar')
        with open(os.path.join(module_dir, 'foo'), 'w') as f:
            f.write('new bar')
        self.do_integration_test(['sync', '--no-cache'],
                                 {'subdir/foo': 'new bar'})
        assert os.path.exists(index_path), \
            'The index should have been recreated.'
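The test above relies on peru.cache's module-level DEBUG_GIT_COMMAND_COUNT counter: reset it to zero before an operation, then inspect it afterwards to see how many git commands the cache actually ran. A minimal sketch of that pattern, outside the test harness:

import peru.cache

peru.cache.DEBUG_GIT_COMMAND_COUNT = 0
# ... run a sync here, e.g. via peru's test harness or CLI ...
print('git commands issued:', peru.cache.DEBUG_GIT_COMMAND_COUNT)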
From buildinspace/peru, tests/test_cache.py:
    async def test_export_force_with_preexisting_files(self):
        # Create a working tree with a conflicting file.
        dirty_content = {'a': 'junk'}
        export_dir = create_dir(dirty_content)
        # Export should fail by default.
        with self.assertRaises(peru.cache.DirtyWorkingCopyError):
            await self.cache.export_tree(self.content_tree, export_dir)
        assert_contents(export_dir, dirty_content)
        # But it should succeed with the force flag.
        await self.cache.export_tree(self.content_tree, export_dir, force=True)
        assert_contents(export_dir, self.content)
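This test exercises export_tree's force flag: by default an export refuses to overwrite preexisting files and raises DirtyWorkingCopyError, while force=True overwrites them. A hedged sketch of that flow, assuming cache is an initialized peru cache and tree is a tree hash already stored in it:

import asyncio

import peru.cache

async def export_over_dirty_dir(cache, tree, dest):
    try:
        # Default behavior: refuse to overwrite conflicting files.
        await cache.export_tree(tree, dest)
    except peru.cache.DirtyWorkingCopyError:
        # Retry with force=True to overwrite them, as the test above does.
        await cache.export_tree(tree, dest, force=True)

# Example invocation: asyncio.run(export_over_dirty_dir(cache, tree, 'export_dir'))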
From buildinspace/peru, peru/rule.py:
    def _cache_key(self, input_tree):
        return cache.compute_key({
            'input_tree': input_tree,
            'copy': self.copy,
            'move': self.move,
            'executable': self.executable,
            'drop': self.drop,
            'pick': self.pick,
            'export': self.export,
        })
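Here cache.compute_key hashes a dict of everything that affects a rule's output into a stable cache key, so identical inputs produce the same key and any changed field invalidates it. A minimal sketch (the field names below are illustrative, not part of peru's schema):

from peru import cache

key_a = cache.compute_key({'input_tree': 'abc123', 'copy': {'src': 'dst'}})
key_b = cache.compute_key({'input_tree': 'abc123', 'copy': {'src': 'dst'}})
assert key_a == key_b  # deterministic: equal inputs yield equal keys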
From buildinspace/peru, peru/plugin.py:
def _plugin_cache_key(definition, module_fields):
    assert definition.cache_fields, "Can't compute key for uncacheable type."
    return cache.compute_key({
        'type': definition.type,
        'cacheable_fields': {
            field: module_fields.get(field, None)
            for field in definition.cache_fields
        },
    })
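The dict comprehension above restricts the key to the fields a plugin declares in cache_fields, so unrelated module fields cannot cause spurious cache misses. A plain-Python illustration of that filtering (all names below are made up for the example):

cache_fields = ('url', 'rev')
module_fields = {'url': 'https://example.com/repo', 'rev': 'main', 'pick': '*.py'}
cacheable = {field: module_fields.get(field, None) for field in cache_fields}
assert cacheable == {'url': 'https://example.com/repo', 'rev': 'main'}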
From buildinspace/peru, peru/rule.py:
async def _copy_files_modifications(_cache, tree, paths_multimap):
    modifications = {}
    for source in paths_multimap:
        source_info_dict = await _cache.ls_tree(tree, source)
        if not source_info_dict:
            raise NoMatchingFilesError(
                'Path "{}" does not exist.'.format(source))
        source_info = list(source_info_dict.items())[0][1]
        for dest in paths_multimap[source]:
            # If dest is a directory, put the source inside dest instead of
            # overwriting dest entirely.
            dest_is_dir = False
            dest_info_dict = await _cache.ls_tree(tree, dest)
            if dest_info_dict:
                dest_info = list(dest_info_dict.items())[0][1]
                dest_is_dir = (dest_info.type == cache.TREE_TYPE)
            adjusted_dest = dest
            if dest_is_dir:
                adjusted_dest = str(
                    PurePosixPath(dest) / PurePosixPath(source).name)
            modifications[adjusted_dest] = source_info
    return modifications
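This helper leans on cache.ls_tree, which returns a {path: entry} mapping (empty when the path is absent) whose entries expose a .type comparable to cache.TREE_TYPE. A hedged sketch of the directory check it performs, assuming cache_obj is a peru cache instance:

from peru import cache

async def is_directory(cache_obj, tree, path):
    entries = await cache_obj.ls_tree(tree, path)
    if not entries:
        return False  # path does not exist in the tree
    entry = next(iter(entries.values()))
    return entry.type == cache.TREE_TYPE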