How to use the dfvfs.resolver.resolver.Resolver.OpenFileEntry function in dfvfs

To help you get started, we’ve selected a few dfvfs examples based on popular ways it is used in public projects.
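Before the project snippets, here is a minimal sketch of the call itself: build a path specification, then hand it to the resolver. The file path is a placeholder, not a file that ships with dfvfs.

from dfvfs.lib import definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver

# Build a path specification for a file on the local operating system.
# '/tmp/example.dd' is a placeholder path.
os_path_spec = path_spec_factory.Factory.NewPathSpec(
    definitions.TYPE_INDICATOR_OS, location='/tmp/example.dd')

# Resolve the path specification to a file entry; OpenFileEntry returns
# None when the path specification cannot be resolved.
file_entry = resolver.Resolver.OpenFileEntry(os_path_spec)
if file_entry:
  print(file_entry.name)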

github log2timeline / dfvfs / tests / lib / apfs_helper.py
    test_path = self._GetTestFilePath(['apfs_encrypted.dmg'])
    self._SkipIfPathNotExists(test_path)

    test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=test_path)
    test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
    test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_TSK_PARTITION, location='/p1',
        parent=test_raw_path_spec)
    test_apfs_container_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_APFS_CONTAINER, location='/apfs1',
        parent=test_tsk_partition_path_spec)

    container_file_entry = resolver.Resolver.OpenFileEntry(
        test_apfs_container_path_spec, resolver_context=resolver_context)
    fsapfs_volume = container_file_entry.GetAPFSVolume()

    is_unlocked = apfs_helper.APFSUnlockVolume(
        fsapfs_volume, test_apfs_container_path_spec,
        resolver.Resolver.key_chain)
    self.assertFalse(is_unlocked)

    resolver.Resolver.key_chain.SetCredential(
        test_apfs_container_path_spec, 'password', self._APFS_PASSWORD)

    is_unlocked = apfs_helper.APFSUnlockVolume(
        fsapfs_volume, test_apfs_container_path_spec,
        resolver.Resolver.key_chain)
    self.assertTrue(is_unlocked)
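The test above uses a resolver_context created earlier in the test case (the excerpt starts mid-method). A minimal sketch of setting one up, using the public dfvfs API:

from dfvfs.resolver import context

# A dedicated resolver context keeps its own object cache, so file systems
# and file entries opened through it stay isolated between tests.
resolver_context = context.Context()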
github log2timeline / plaso / tests / parsers / test_lib.py
  def _ParseFileByPathSpec(
      self, path_spec, parser, collection_filters_helper=None,
      knowledge_base_values=None, timezone='UTC'):
    """Parses a file with a parser and writes results to a storage writer.

    Args:
      path_spec (dfvfs.PathSpec): path specification.
      parser (BaseParser): parser.
      collection_filters_helper (Optional[CollectionFiltersHelper]): collection
          filters helper.
      knowledge_base_values (Optional[dict]): knowledge base values.
      timezone (str): timezone.

    Returns:
      FakeStorageWriter: storage writer.
    """
    storage_writer = self._CreateStorageWriter()
    file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
    parser_mediator = self._CreateParserMediator(
        storage_writer, collection_filters_helper=collection_filters_helper,
        file_entry=file_entry, knowledge_base_values=knowledge_base_values,
        timezone=timezone)

    if isinstance(parser, interface.FileEntryParser):
      parser.Parse(parser_mediator)

    elif isinstance(parser, interface.FileObjectParser):
      file_object = file_entry.GetFileObject()
      try:
        parser.Parse(parser_mediator, file_object)
      finally:
        file_object.close()

    else:
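      # The excerpt is truncated here; per the docstring, the helper
      # eventually returns the FakeStorageWriter.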
github log2timeline / plaso / tests / filters / file_entry.py
  def testMatches(self):
    """Tests the Matches function."""
    test_file_path = self._GetTestFilePath(['ímynd.dd'])
    self._SkipIfPathNotExists(test_file_path)

    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)

    test_filter = file_entry_filters.ExtensionsFileEntryFilter(['txt'])

    # Test a filter match.
    tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_TSK, inode=15,
        location='/passwords.txt', parent=os_path_spec)

    file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)
    self.assertTrue(test_filter.Matches(file_entry))

    # Test a filter non-match.
    tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_TSK, inode=16,
        location='/a_directory/another_file', parent=os_path_spec)

    file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)
    self.assertFalse(test_filter.Matches(file_entry))

    # Test that the match fails because the path specification has no location.
    tsk_path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_TSK, inode=15, parent=os_path_spec)

    file_entry = path_spec_resolver.Resolver.OpenFileEntry(tsk_path_spec)
    self.assertFalse(test_filter.Matches(file_entry))
github maurermj08 / vftools / dfvfs_utils / dfvfs_util.py
    def get_pathspec_information(self, pathspec):
        """Creates a dictionary of information about the pathspec; this must open the file in memory."""
        pathspec_information = self.get_pathspec_basic_information(pathspec)
        file_entry = resolver.Resolver.OpenFileEntry(pathspec)

        stat_object = file_entry.GetStat()

        for attribute in ['mtime', 'atime', 'ctime', 'crtime', 'size', 'mode', 'uid', 'gid']:
            pathspec_information[attribute] = str(getattr(stat_object, attribute, ''))

        pathspec_information['inode'] = getattr(stat_object, 'ino', '')

        file_entry_type = getattr(stat_object, 'type', '')
        if file_entry_type:
            if file_entry_type == definitions.FILE_ENTRY_TYPE_DEVICE:
                pathspec_information['type'] = 'device'
                pathspec_information['legacy_type'] = 'b/b'
            elif file_entry_type == definitions.FILE_ENTRY_TYPE_DIRECTORY:
                pathspec_information['type'] = 'dir'
                pathspec_information['legacy_type'] = 'd/d'
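This snippet targets the stat-object interface of older dfvfs releases. Newer dfvfs versions expose the same values as properties on the file entry itself; a rough sketch, assuming a recent dfvfs where time values come back as dfdatetime objects rather than POSIX timestamps:

# Rough sketch for newer dfvfs releases (assumption: a version where the
# stat-style values are file entry properties).
size = file_entry.size
entry_type = file_entry.entry_type
modification_time = file_entry.modification_time  # dfdatetime object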
github log2timeline / plaso / plaso / cli / image_export_tool.py
  def _ExtractFileEntry(
      self, path_spec, destination_path, skip_duplicates=True):
    """Extracts a file entry.

    Args:
      path_spec (dfvfs.PathSpec): path specification of the source file.
      destination_path (str): path where the extracted files should be stored.
      skip_duplicates (Optional[bool]): True if files with duplicate content
          should be skipped.
    """
    file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)

    if not file_entry:
      logger.warning('Unable to open file entry for path spec: {0:s}'.format(
          path_spec.comparable))
      return

    if not self._filter_collection.Matches(file_entry):
      return

    file_entry_processed = False
    for data_stream in file_entry.data_streams:
      if self._abort:
        break
      self._ExtractDataStream(
          file_entry, data_stream.name, destination_path,
          skip_duplicates=skip_duplicates)
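A note on the warning above: comparable renders the path specification together with its whole parent chain as text, which keeps log messages unambiguous. For example:

# comparable flattens the parent chain into a readable multi-line string,
# one path specification per line.
print(path_spec.comparable)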
github log2timeline / plaso / plaso / engine / collector.py
  def CollectPathSpecs(self, path_spec, find_specs=None):
    """Collects path specification from a specific source.

    Args:
      path_spec: the path specification (instance of dfvfs.path.PathSpec)
                 to process.
      find_specs: optional list of find specifications (instances of
                  dfvfs.FindSpec).

    Yields:
      Path specifications (instances of dfvfs.PathSpec) of file entries
      found in the source.
    """
    try:
      file_entry = path_spec_resolver.Resolver.OpenFileEntry(
          path_spec, resolver_context=self._resolver_context)
    except (
        dfvfs_errors.AccessError, dfvfs_errors.BackEndError,
        dfvfs_errors.PathSpecError) as exception:
      logging.error(
          u'Unable to open file entry with error: {0:s}'.format(exception))
      return

    if not file_entry:
      logging.warning(u'Unable to open: {0:s}'.format(path_spec.comparable))
      return

    if (not file_entry.IsDirectory() and not file_entry.IsFile() and
        not file_entry.IsDevice()):
      logging.warning((
          u'Source path specification not a device, file or directory.\n'
github log2timeline / plaso / plaso / frontend / utils.py
      before: Optional number of bytes to include in the output before
              the event. The default is none.
      length: Optional number of lines to include in the output.
              The default is 20.

    Returns:
      A string that contains the hexadecimal representation of the event data.
    """
    if not event_object:
      return u'Missing event object.'

    if not hasattr(event_object, 'pathspec'):
      return u'Event object has no path specification.'

    try:
      file_entry = path_spec_resolver.Resolver.OpenFileEntry(
          event_object.pathspec)
    except IOError as exception:
      return u'Unable to open file with error: {0:s}'.format(exception)

    offset = getattr(event_object, 'offset', 0)
    if offset - before > 0:
      offset -= before

    file_object = file_entry.GetFileObject()
    file_object.seek(offset, os.SEEK_SET)
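    # Each line of the hex dump renders 16 bytes, hence lines * 16.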
    data = file_object.read(int(length) * 16)
    file_object.close()

    return cls.GetHexDump(data, offset)
github log2timeline / plaso / plaso / frontend / preg.py
    all the discovered source path specifications (instances of PathSpec)
    and extracts Registry helper objects based on the supplied
    path.

    Args:
      path: the path filter to a Registry file.
      codepage: the codepage used for the Registry file.

    Yields:
      A Registry helper object (instance of PregRegistryHelper).
    """
    environment_variables = self.knowledge_base_object.GetEnvironmentVariables()

    for source_path_spec in self._source_path_specs:
      if source_path_spec.type_indicator == dfvfs_definitions.TYPE_INDICATOR_OS:
        file_entry = path_spec_resolver.Resolver.OpenFileEntry(source_path_spec)
        if file_entry.IsFile():
          yield PregRegistryHelper(
              file_entry, u'OS', self.knowledge_base_object, codepage=codepage)
          continue

        # TODO: Change this into an actual mount point path spec.
        self._mount_path_spec = source_path_spec

      collector_name = source_path_spec.type_indicator
      parent_path_spec = getattr(source_path_spec, u'parent', None)
      if parent_path_spec and parent_path_spec.type_indicator == (
          dfvfs_definitions.TYPE_INDICATOR_VSHADOW):
        vss_store = getattr(parent_path_spec, u'store_index', 0)
        collector_name = u'VSS Store: {0:d}'.format(vss_store)

      file_system, mount_point = self._GetSourceFileSystem(source_path_spec)
github log2timeline / dfvfs / examples / recursive_hasher2.py
  def CalculateHashes(self, base_path_specs, output_writer):
    """Recursively calculates hashes starting with the base path specifications.

    Args:
      base_path_specs: a list of source path specifications (instances
                       of dfvfs.PathSpec).
      output_writer: the output writer (instance of StdoutWriter).
    """
    for base_path_spec in base_path_specs:
      file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
      file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
      if file_entry is None:
        logging.warning(
            u'Unable to open base path specification:\n{0:s}'.format(
                base_path_spec.comparable))
        continue

      self._CalculateHashesFileEntry(
          file_system, file_entry, u'', output_writer)
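The _CalculateHashesFileEntry helper falls outside this excerpt. Below is a minimal sketch of the recursion it performs, assuming the standard dfvfs file entry API; the chunked read, the SHA-256 choice, and output_writer.WriteFileHash are simplifications patterned on the example, not a verbatim copy.

  def _CalculateHashesFileEntry(
      self, file_system, file_entry, parent_full_path, output_writer):
    # Sketch (assumes "import hashlib" at module level): walk the file
    # entry tree depth-first and hash every regular file.
    full_path = file_system.JoinPath([parent_full_path, file_entry.name])
    if file_entry.IsFile():
      hasher = hashlib.sha256()
      file_object = file_entry.GetFileObject()
      try:
        data = file_object.read(65536)
        while data:
          hasher.update(data)
          data = file_object.read(65536)
      finally:
        file_object.close()
      output_writer.WriteFileHash(full_path, hasher.hexdigest())

    for sub_file_entry in file_entry.sub_file_entries:
      self._CalculateHashesFileEntry(
          file_system, sub_file_entry, full_path, output_writer)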