How to use the plaso.lib.errors module in plaso

To help you get started, we’ve selected a few plaso examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github log2timeline / plaso / tests / cli / helpers / profiling.py View on Github external
profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)

    with self.assertRaises(errors.BadConfigOption):
      options = cli_test_lib.TestOptions()
      options.profiling_directory = '/bogus'

      profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)

    with self.assertRaises(errors.BadConfigOption):
      options = cli_test_lib.TestOptions()
      options.profiling_sample_rate = 'a'

      profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)

    with self.assertRaises(errors.BadConfigOption):
      options = cli_test_lib.TestOptions()
      options.profiling_sample_rate = 100

      profiling.ProfilingArgumentsHelper.ParseOptions(options, test_tool)
github log2timeline / plaso / tests / cli / image_export_tool.py View on Github external
test_file_path = self._GetTestFilePath(['image.qcow2'])
    self._SkipIfPathNotExists(test_file_path)

    output_writer = test_lib.TestOutputWriter(encoding='utf-8')
    test_tool = image_export_tool.ImageExportTool(output_writer=output_writer)

    options = test_lib.TestOptions()
    options.artifact_definitions_path = test_artifacts_path
    options.image = test_file_path

    test_tool.ParseOptions(options)

    options = test_lib.TestOptions()

    with self.assertRaises(errors.BadConfigOption):
      test_tool.ParseOptions(options)
github log2timeline / plaso / plaso / parsers / recycler.py View on Github external
file_object (dfvfs.FileIO): file-like object.
      record_offset (int): record offset.
      record_size (int): record size.

    Raises:
      ParseError: if the record cannot be read.
    """
    record_data = self._ReadData(file_object, record_offset, record_size)

    record_map = self._GetDataTypeMap('recycler_info2_file_entry')

    try:
      record = self._ReadStructureFromByteStream(
          record_data, record_offset, record_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map record data at offset: 0x{0:08x} with error: '
          '{1!s}').format(record_offset, exception))

    codepage = parser_mediator.codepage or 'ascii'

    # The original filename can contain remnant data after the end-of-string
    # character.
    ascii_filename = record.original_filename.split(b'\x00')[0]

    try:
      ascii_filename = ascii_filename.decode(codepage)
    except UnicodeDecodeError:
      ascii_filename = ascii_filename.decode(codepage, errors='replace')

      parser_mediator.ProduceExtractionWarning(
          'unable to decode original filename.')
github log2timeline / plaso / plaso / parsers / custom_destinations.py View on Github external
parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      file_object (dfvfs.FileIO): a file-like object.

    Raises:
      UnableToParseFile: when the file cannot be parsed.
    """
    file_entry = parser_mediator.GetFileEntry()
    display_name = parser_mediator.GetDisplayName()

    file_header_map = self._GetDataTypeMap('custom_file_header')

    try:
      file_header, file_offset = self._ReadStructureFromFileObject(
          file_object, 0, file_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile((
          'Invalid Custom Destination: {0:s} - unable to parse file header '
          'with error: {1!s}').format(display_name, exception))

    if file_header.unknown1 != 2:
      raise errors.UnableToParseFile((
          'Unsupported Custom Destination file: {0:s} - invalid unknown1: '
          '{1:d}.').format(display_name, file_header.unknown1))

    if file_header.header_values_type > 2:
      raise errors.UnableToParseFile((
          'Unsupported Custom Destination file: {0:s} - invalid header value '
          'type: {1:d}.').format(display_name, file_header.header_values_type))

    if file_header.header_values_type == 0:
      data_map_name = 'custom_file_header_value_type_0'
github log2timeline / plaso / plaso / cli / analysis_tool.py View on Github external
def _ParseStorageFileOptions(self, options):
    """Parses the storage file options.

    Args:
      options: the command line arguments (instance of argparse.Namespace).

    Raises:
      BadConfigOption: if the storage file option is missing or the path
          does not refer to an existing regular file.
    """
    storage_file_path = self.ParseStringOption(options, u'storage_file')
    # The attribute is set before validation, matching the established
    # behavior: a failed check still leaves the parsed value on the tool.
    self._storage_file_path = storage_file_path

    if not storage_file_path:
      raise errors.BadConfigOption(u'Missing storage file option.')

    if not os.path.isfile(storage_file_path):
      raise errors.BadConfigOption(
          u'No such storage file: {0:s}.'.format(storage_file_path))
github log2timeline / plaso / plaso / parsers / bash_history.py View on Github external
def ParseRecord(self, parser_mediator, key, structure):
    """Parses a record and produces a Bash history event.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): name of the parsed structure.
      structure (pyparsing.ParseResults): elements parsed from the file.

    Raises:
      ParseError: when the structure type is unknown.
    """
    # This parser only understands a single structure type.
    if key != 'log_entry':
      raise errors.ParseError(
          'Unable to parse record, unknown structure: {0:s}'.format(key))

    posix_timestamp = self._GetValueFromStructure(structure, 'timestamp')
    date_time = dfdatetime_posix_time.PosixTime(timestamp=posix_timestamp)

    event_data = BashHistoryEventData()
    event_data.command = self._GetValueFromStructure(structure, 'command')

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
github log2timeline / plaso / plaso / output / shared_elastic.py View on Github external
def SetCACertificatesPath(self, ca_certificates_path):
    """Sets the path to the CA certificates.

    Args:
      ca_certificates_path (str): path to file containing a list of root
        certificates to trust.

    Raises:
      BadConfigOption: if the CA certificates file does not exist.
    """
    # An empty or None path means no custom CA bundle was configured;
    # leave the current value untouched.
    if ca_certificates_path:
      if not os.path.exists(ca_certificates_path):
        raise errors.BadConfigOption(
            'No such certificate file: {0:s}.'.format(ca_certificates_path))

      self._ca_certs = ca_certificates_path
      logger.debug(
          'Elasticsearch ca_certs: {0!s}'.format(ca_certificates_path))
github log2timeline / plaso / plaso / parsers / winreg_plugins / programscache.py View on Github external
registry_key.path, registry_value.name)

      shell_items_parser = shell_items.ShellItemsParser(display_name)
      shell_items_parser.ParseByteStream(
          parser_mediator, value_data[value_data_offset:],
          codepage=parser_mediator.codepage)

      link_target = shell_items_parser.CopyToPath()
      link_targets.append(link_target)

      value_data_offset += entry_header.data_size

      try:
        entry_footer = self._ReadStructureFromByteStream(
            value_data[value_data_offset:], value_data_offset, entry_footer_map)
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning((
            'unable to parse entry footer at offset: 0x{0:08x} '
            'with error: {1!s}').format(value_data_offset, exception))
        return

      value_data_offset += entry_footer_map.GetByteSize()

      sentinel = entry_footer.sentinel

    # TODO: recover remaining items.

    if known_folder_identifier:
      known_folder_identifier = '{0!s}'.format(known_folder_identifier)

    event_data = ExplorerProgramsCacheEventData()
    event_data.entries = ' '.join([
github log2timeline / plaso / plaso / parsers / esedb_plugins / srum.py View on Github external
if not value:
      return None

    value_length = len(value)
    if value_length not in (4, 8):
      raise errors.ParseError('Unsupported value data size: {0:d}'.format(
          value_length))

    if value_length == 4:
      floating_point_map = self._GetDataTypeMap('float32le')
    elif value_length == 8:
      floating_point_map = self._GetDataTypeMap('float64le')

    try:
      return self._ReadStructureFromByteStream(value, 0, floating_point_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError(
          'Unable to parse floating-point value with error: {0!s}'.format(
              exception))
github log2timeline / plaso / plaso / frontend / storage_media_frontend.py View on Github external
Raises:
      SourceScannerError: if the format of or within the source is
                           not supported or the source does not exist.
    """
    if (not source_path.startswith(u'\\\\.\\') and
        not os.path.exists(source_path)):
      raise errors.SourceScannerError(
          u'No such device, file or directory: {0:s}.'.format(source_path))

    # Use the dfVFS source scanner to do the actual scanning.
    self._scan_context.OpenSourcePath(source_path)

    try:
      self._source_scanner.Scan(self._scan_context)
    except dfvfs_errors.BackEndError as exception:
      raise errors.SourceScannerError(
          u'Unable to scan source, with error: {0:s}'.format(exception))

    return self._scan_context